repo_name
stringlengths 6
61
| path
stringlengths 4
230
| copies
stringlengths 1
3
| size
stringlengths 4
6
| text
stringlengths 1.01k
850k
| license
stringclasses 15
values | hash
int64 -9,220,477,234,079,998,000
9,219,060,020B
| line_mean
float64 11.6
96.6
| line_max
int64 32
939
| alpha_frac
float64 0.26
0.9
| autogenerated
bool 1
class | ratio
float64 1.62
6.1
| config_test
bool 2
classes | has_no_keywords
bool 2
classes | few_assignments
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
maciekswat/Twedit | Messaging/__init__.py | 1 | 5405 | ## {{{ http://code.activestate.com/recipes/144838/ (r1)
#messaging.py
#this is a module used for messaging. It allows multiple classes
#to handle various types of messages. It should work on all python
#versions >= 1.5.2
# These are useful slides explaining bacis usage of regex in Notepad++
# http://www.slideshare.net/anjesh/the-power-of-regular-expression-use-in-notepad
# example: to replace print.... statements with dbgMsg(....) put print[\s]*(.*) in find box and dbgMsg(\1) in replace box \1 refers to group of characters matched by (.*) if we have another group (...) we would use \2 to refer to it
import sys, string, exceptions
# This flag determines whether debug output is sent to debug handlers.
# Toggled at runtime via setDebugging(); read by dbgMsg() and pd().
debug = 1
def setDebugging(debugging):
    """Globally enable (truthy) or disable (falsy) debug message output."""
    global debug
    debug = debugging
# Python-2 style: both classes inherit from the legacy 'exceptions' module.
class MessagingException(exceptions.Exception):
    """an exception class for any errors that may occur in
    a messaging function"""
    def __init__(self, args=None):
        # NOTE(review): storing a possibly-None value in self.args deviates
        # from the convention that Exception.args is a tuple -- confirm no
        # caller relies on str()/unpacking of this exception.
        self.args = args
class FakeException(exceptions.Exception):
    """an exception that is thrown and then caught
    to get a reference to the current execution frame"""
    pass
class MessageHandler:
    """All message handlers should inherit this class. Each method is passed
    a string when the program calls a messaging function (stdMsg/errMsg/
    dbgMsg); the default implementations silently discard the message."""
    def handleStdMsg(self, msg):
        """do something with a standard message from the program"""
        pass
    def handleErrMsg(self, msg):
        """do something with an error message. This will already include the
        class, method, and line of the call"""
        pass
    def handleDbgMsg(self, msg):
        """do something with a debug message. This will already include the
        class, method, and line of the call"""
        pass
class defaultMessageHandler(MessageHandler):
    """Fallback handler that echoes every message to the standard streams:
    standard and debug messages go to stdout, error messages to stderr."""
    def handleStdMsg(self, msg):
        """Write a standard message, newline-terminated, to stdout."""
        text = msg + "\n"
        sys.stdout.write(text)
    def handleErrMsg(self, msg):
        """Write an error message, newline-terminated, to stderr."""
        text = msg + "\n"
        sys.stderr.write(text)
    def handleDbgMsg(self, msg):
        """Write a debug message, newline-terminated, to stdout."""
        text = msg + "\n"
        sys.stdout.write(text)
# Registry of handler instances; every stdMsg/errMsg/dbgMsg call fans out
# to each entry in order of registration.
_messageHandlers = []
# call registerMessageHandler() with the handler to register it for messages
def registerMessageHandler(handler):
    """Register *handler* to receive std/err/dbg messages.

    Inheritance from MessageHandler is not enforced; the handler is
    duck-typed instead: it must provide all three handle*Msg methods.

    Raises MessagingException if a required method is missing.
    """
    for methodName in ["handleStdMsg", "handleErrMsg", "handleDbgMsg"]:
        # hasattr() replaces the original bare try/except around getattr();
        # same "is the method there?" answer, clearer intent.
        if not hasattr(handler, methodName):
            # Call-style raise is valid on both Python 2 and 3; the original
            # comma-style raise was Python-2-only syntax.
            raise MessagingException(
                "The class " + handler.__class__.__name__
                + " is missing a " + methodName + " method")
    _messageHandlers.append(handler)
def getCallString(level):
    """Describe the caller *level* frames up as "Class::method (line N)".

    A throwaway exception is raised and caught purely to obtain the current
    execution frame; this works on Python >= 1.5.2 (newer interpreters could
    use sys._getframe instead).
    """
    try:
        raise FakeException("this is fake")
    except Exception, e:  # Python-2-only 'except X, e' syntax
        # get the current execution frame from the traceback
        f = sys.exc_info()[2].tb_frame
    # go back as many call-frames as was specified
    while level >= 0:
        f = f.f_back
        level = level-1
    # if there is a self variable in the caller's local namespace then
    # we'll make the assumption that the caller is a class method
    obj = f.f_locals.get("self", None)
    functionName = f.f_code.co_name  # NOTE: assigned but never used below
    if obj:
        callStr = obj.__class__.__name__+"::"+f.f_code.co_name+" (line "+str(f.f_lineno)+")"
    else:
        callStr = f.f_code.co_name+" (line "+str(f.f_lineno)+")"
    return callStr
#send this message to all handlers of std messages
def stdMsg(*args):
    """Join *args* into one space-separated string and send it to every
    registered handler's handleStdMsg."""
    # " ".join replaces the deprecated string.join(); identical result on
    # Python 2 and 3.
    stdStr = " ".join(map(str, args))
    for handler in _messageHandlers:
        handler.handleStdMsg(stdStr)
#send this message to all handlers of error messages
def errMsg(*args):
    """Prefix the message with "Error in <caller> : " and fan it out to
    every registered handler's handleErrMsg."""
    # " ".join replaces the deprecated string.join(); works on Py2 and Py3.
    errStr = "Error in "+getCallString(1)+" : " + " ".join(map(str, args))
    for handler in _messageHandlers:
        handler.handleErrMsg(errStr)
#send this message to all handlers of debug messages
def dbgMsg(*args):
    """Send a debug message (prefixed with the caller's location) to all
    handlers; a no-op while debugging is disabled via setDebugging(0)."""
    if not debug:
        return
    # " ".join replaces the deprecated string.join(); works on Py2 and Py3.
    errStr = getCallString(1)+" : " + " ".join(map(str, args))
    for handler in _messageHandlers:
        handler.handleDbgMsg(errStr)
def pd(*args):
    """Shorthand debug-print, behaviorally identical to dbgMsg().

    The body is duplicated rather than delegating to dbgMsg() on purpose:
    getCallString(1) must inspect *this* function's caller, and an extra
    call layer would shift the reported frame.
    """
    if not debug:
        return
    # " ".join replaces the deprecated string.join(); works on Py2 and Py3.
    errStr = getCallString(1)+" : " + " ".join(map(str, args))
    for handler in _messageHandlers:
        handler.handleDbgMsg(errStr)
# Install the stdout/stderr handler at import time so messages are visible
# by default.
registerMessageHandler(defaultMessageHandler())
#end of messaging.py
#test.py
#here is a simple use case for the above module
# from messaging import stdMsg, dbgMsg, errMsg, setDebugging
# setDebugging(0)
# dbgMsg("this won't be printed")
# stdMsg("but this will")
# setDebugging(1)
# def foo():
# dbgMsg("this is a debug message in", "foo")
# class bar:
# def baz(self):
# errMsg("this is an error message in bar")
# foo()
# b = bar()
# b.baz()
# #end of test.py
# output is :
# but this will
# foo (line 12) : this is a debug message in foo
# Error in bar::baz (line 16) : this is an error message in bar
# ## end of http://code.activestate.com/recipes/144838/ }}}
| gpl-3.0 | -8,712,596,599,570,487,000 | 32.870968 | 232 | 0.638483 | false | 3.766551 | false | false | false |
matachi/hydra | blog/models.py | 1 | 1621 | from django.core.urlresolvers import reverse
from django.db import models
from django.db.models.signals import pre_save
from django.dispatch import receiver
from django.template.defaultfilters import slugify, stringformat
import markdown
import re
from bs4 import BeautifulSoup
class Tag(models.Model):
    """A label attached to blog posts; slug is the URL-safe form of name."""
    name = models.CharField(max_length=50, unique=True)
    slug = models.SlugField(max_length=50, unique=True)
    def __str__(self):
        """Display tags by their name (admin, shell, templates)."""
        return self.name
class Post(models.Model):
    """A blog post authored in Markdown.

    content holds the raw Markdown; content_html and slug are derived
    automatically by the pre_save signal handler below.
    """
    title = models.CharField(max_length=200)
    date = models.DateTimeField()
    # Raw Markdown source as entered by the author.
    content = models.TextField()
    # Rendered HTML; filled in by the pre_save signal handler.
    content_html = models.TextField(blank=True)
    tags = models.ManyToManyField(Tag, related_name='posts', blank=True)
    # URL slug; also generated in the pre_save signal handler.
    slug = models.SlugField(max_length=200, unique=True, blank=True)
    def get_absolute_url(self):
        """Canonical URL /<year>/<month>/<day>/<slug>/ with zero-padded
        month and day, resolved through the 'blog:post' URL pattern."""
        return reverse('blog:post', kwargs={
            'year': self.date.year,
            'month': stringformat(self.date.month, '02d'),
            'day': stringformat(self.date.day, '02d'),
            'slug': self.slug
        })
    def __str__(self):
        return self.title
@receiver(pre_save, sender=Post)
def pre_save_post(**kwargs):
    """Signal handler: render Markdown to HTML and derive the slug before a
    Post is saved."""
    content_html = markdown.markdown(kwargs['instance'].content,
                                     extensions=['codehilite'])
    # NOTE(review): no parser argument is passed to BeautifulSoup, so the
    # result depends on whichever parser bs4 selects on this system.
    soup = BeautifulSoup(content_html)
    # Demote each top-level heading one step (h1 -> h2, ...) so post
    # headings sit below the page title.
    # NOTE(review): an h6 would become the invalid tag h7 -- confirm inputs.
    for tag in soup.find_all(re.compile(r'h\d')):
        if tag.parent is soup:
            tag.name = 'h%d' % (int(tag.name[1]) + 1)
    kwargs['instance'].content_html = str(soup)
    # Dots would simply be dropped by slugify, so map them to hyphens first.
    kwargs['instance'].slug = slugify(
        kwargs['instance'].title.replace('.', '-'))
| mit | 2,736,807,335,183,461,400 | 32.081633 | 72 | 0.648982 | false | 3.778555 | false | false | false |
erdc/proteus | proteus/tests/HotStart_3P/twp_navier_stokes_p.py | 1 | 7823 | from __future__ import absolute_import
from builtins import object
from proteus import *
from proteus.default_p import *
from .NS_hotstart import *
from proteus.mprans import RANS3PF
# Use the RANS3PF (three-phase RANS) level model for this momentum equation.
LevelModelType = RANS3PF.LevelModel
name = "momentum_eqn"

# Physical and numerical coefficients of the momentum equations. The *_model
# arguments are indices into the list of models assembled elsewhere
# (this momentum model is 0, the pressure model is 2); None disables the
# corresponding coupling. Density/viscosity pairs and epsFact values come
# from the star import of NS_hotstart.
coefficients = RANS3PF.Coefficients(epsFact=epsFact_viscosity,
                                    sigma=0.0,
                                    rho_0 = rho_0,
                                    nu_0 = nu_0,
                                    rho_1 = rho_1,
                                    nu_1 = nu_1,
                                    g=g,
                                    nd=nd,
                                    ME_model=0,
                                    PRESSURE_model=2,
                                    SED_model=None,
                                    VOS_model=None,
                                    VOF_model=None,
                                    LS_model=None,
                                    Closure_0_model=None,
                                    Closure_1_model=None,
                                    epsFact_density=epsFact_density,
                                    stokes=False,
                                    useVF=useVF,
                                    useRBLES=useRBLES,
                                    useMetrics=useMetrics,
                                    eb_adjoint_sigma=1.0,
                                    eb_penalty_constant=weak_bc_penalty_constant,
                                    forceStrongDirichlet=ns_forceStrongDirichlet,
                                    turbulenceClosureModel=ns_closure,
                                    movingDomain=movingDomain,
                                    dragAlpha=dragAlpha,
                                    PSTAB=1.0,
                                    cE=cE,
                                    cMax=cMax,
                                    CORRECT_VELOCITY=CORRECT_VELOCITY)
#######################
# BOUNDARY CONDITIONS #
#######################
def getDBC_u(x, flag):
    """Dirichlet BC for the x-velocity.

    On boundary flags 1-4 return a callable (x, t) evaluating the
    manufactured exact solution (selected by the module-global
    manufactured_solution); elsewhere return None (no Dirichlet BC).
    """
    pi = np.pi
    if flag not in (1, 2, 3, 4):
        return None
    if manufactured_solution == 1:
        return lambda x, t: np.sin(x[0]) * np.sin(x[1] + t)
    return lambda x, t: np.sin(pi * x[0]) * np.cos(pi * x[1]) * np.sin(t)
def getDBC_v(x, flag):
    """Dirichlet BC for the y-velocity.

    On boundary flags 1-4 return a callable (x, t) evaluating the
    manufactured exact solution (selected by the module-global
    manufactured_solution); elsewhere return None (no Dirichlet BC).
    """
    pi = np.pi
    if flag not in (1, 2, 3, 4):
        return None
    if manufactured_solution == 1:
        return lambda x, t: np.cos(x[0]) * np.cos(x[1] + t)
    return lambda x, t: -np.cos(pi * x[0]) * np.sin(pi * x[1]) * np.sin(t)
def getAFBC_u(x, flag):
    """Advective flux BC for u: unspecified everywhere (returns None)."""
    return None

def getAFBC_v(x, flag):
    """Advective flux BC for v: unspecified everywhere (returns None)."""
    return None

def getDFBC_u(x, flag):
    """Diffusive flux BC for u (grad(u).n): unspecified everywhere."""
    return None

def getDFBC_v(x, flag):
    """Diffusive flux BC for v (grad(v).n): unspecified everywhere."""
    return None
# Per-component boundary-condition callbacks (0: u, 1: v).
dirichletConditions = {0:getDBC_u,
                       1:getDBC_v}
advectiveFluxBoundaryConditions = {0:getAFBC_u,
                                   1:getAFBC_v}
# Diffusive flux BCs are indexed [component][component] by proteus.
diffusiveFluxBoundaryConditions = {0:{0:getDFBC_u},
                                   1:{1:getDFBC_v}}
######################
# INITIAL CONDITIONS #
######################
class AtRest(object):
    """Initial condition that is identically zero (fluid at rest)."""
    def __init__(self):
        pass
    def uOfXT(self, x, t):
        """Return 0.0 for every point x and time t."""
        return 0.0
class velx_at_t0(object):
    """Initial x-velocity: the manufactured solution at t=0 when the
    module-global manufactured_solution == 1 (from the NS_hotstart star
    import), otherwise zero (the second solution vanishes at t=0)."""
    def __init__(self):
        pass
    def uOfXT(self, x, t):
        if manufactured_solution == 1:
            return np.sin(x[0])*np.sin(x[1])
        else:
            return 0.

class vely_at_t0(object):
    """Initial y-velocity: the manufactured solution at t=0 when
    manufactured_solution == 1, otherwise zero."""
    def __init__(self):
        pass
    def uOfXT(self, x, t):
        if manufactured_solution == 1:
            return np.cos(x[0])*np.cos(x[1])
        else:
            return 0.
# Start from the manufactured solution at t=0 for each velocity component.
initialConditions = {0:velx_at_t0(),
                     1:vely_at_t0()}
#############################
# MATERIAL PARAMETER FIELDS #
#############################
def density(X, t):
    """Manufactured fluid density rho(x, y, t) = sin^2(x + y + t) + 1.

    Always lies in [1, 2], so it is safely bounded away from zero.
    """
    s = np.sin(X[0] + X[1] + t)
    return s * s + 1
# When False, RANS3PF evaluates dynamic_viscosity() instead of a constant mu.
mu_constant=False
def dynamic_viscosity(X, t):
    """Manufactured dynamic viscosity mu*(cos^2(x+y+t) + 1); mu is a module
    global supplied by the star import of NS_hotstart."""
    x = X[0]
    y = X[1]
    return mu*(np.cos(x+y+t)**2+1)

# Callables the solver uses to evaluate spatially varying material fields.
materialParameters = {'density':density,
                      'dynamic_viscosity':dynamic_viscosity}
###############
# FORCE TERMS #
###############
def forcex(X, t):
    """Manufactured momentum source for the x-equation.

    Assembled so the chosen exact velocity/pressure satisfies the momentum
    balance: time derivative + advection - pressure gradient + diffusion.
    manufactured_solution, mu, mu_constant and KILL_PRESSURE_TERM are module
    globals from the NS_hotstart star import.
    """
    x = X[0]
    y = X[1]
    rho = density(X,t)
    #pi = np.pi
    # NOTE(review): the else-branch uses `pi` while the local binding above
    # is commented out -- presumably `pi` arrives via a star import; confirm.
    if manufactured_solution == 1: #u.n!=0
        return (rho*np.sin(x)*np.cos(y+t) # Time derivative
                + rho*np.sin(x)*np.cos(x) # Non-linearity
                - (0. if KILL_PRESSURE_TERM==True else 1.)*np.sin(x)*np.sin(y+t) # Pressure
                + (2*dynamic_viscosity(X,t)*np.sin(x)*np.sin(y+t) # Diffusion
                   +(0. if mu_constant==True else 1.)*mu*2*np.cos(x+y+t)*np.sin(x+y+t)*(np.cos(x)*np.sin(y+t)+np.sin(x)*np.cos(y+t))
                   +(0. if mu_constant==True else 1.)*mu*2*np.cos(x+y+t)*np.sin(x+y+t)*(np.cos(x)*np.sin(y+t)-np.sin(x)*np.cos(y+t)))
                )
    else: # u.n=0
        return (rho*np.sin(pi*x)*np.cos(pi*y)*np.cos(t) # Time derivative
                + rho*pi*np.sin(pi*x)*np.cos(pi*x)*np.sin(t)**2 # non-linearity
                - (0. if KILL_PRESSURE_TERM==True else 1.)*np.sin(x)*np.sin(y+t) # Pressure
                - dynamic_viscosity(X,t)*(-2*pi**2*np.sin(pi*x)*np.cos(pi*y)*np.sin(t)) # Diffusion
                )
def forcey(X, t):
    """Manufactured momentum source for the y-equation (mirror of forcex)."""
    x = X[0]
    y = X[1]
    rho = density(X,t)
    pi = np.pi
    if manufactured_solution == 1: #u.n!=0
        return (-rho*np.cos(x)*np.sin(y+t) # Time derivative
                - rho*np.sin(y+t)*np.cos(y+t) #Non-linearity
                + (0. if KILL_PRESSURE_TERM==True else 1.)*np.cos(x)*np.cos(y+t) #Pressure
                + (2*dynamic_viscosity(X,t)*np.cos(x)*np.cos(y+t) # Diffusion
                   +(0. if mu_constant==True else 1.)*mu*2*np.cos(x+y+t)*np.sin(x+y+t)*(-np.sin(x)*np.cos(y+t)-np.cos(x)*np.sin(y+t))
                   +(0. if mu_constant==True else 1.)*mu*2*np.cos(x+y+t)*np.sin(x+y+t)*(np.sin(x)*np.cos(y+t)-np.cos(x)*np.sin(y+t)))
                )
    else:
        return (-rho*np.cos(pi*x)*np.sin(pi*y)*np.cos(t) # Time derivative
                + rho*pi*np.sin(pi*y)*np.cos(pi*y)*np.sin(t)**2 # non-linearity
                + (0. if KILL_PRESSURE_TERM==True else 1.)*np.cos(x)*np.cos(y+t) #Pressure
                - dynamic_viscosity(X,t)*(2*pi**2*np.cos(pi*x)*np.sin(pi*y)*np.sin(t)) # Diffusion
                )

# Source terms keyed by momentum component (0: x, 1: y).
forceTerms = {0:forcex,
              1:forcey}
##################
# EXACT SOLUTION #
##################
class velx(object):
    """Exact manufactured x-velocity and its spatial gradient."""
    def __init__(self):
        pass
    def uOfXT(self, x, t):
        """Exact u at point x and time t."""
        pi = np.pi
        if manufactured_solution == 1:
            return np.sin(x[0])*np.sin(x[1]+t)
        else:
            return np.sin(pi*x[0])*np.cos(pi*x[1])*np.sin(t)
    def duOfXT(self, x, t):
        """Exact [du/dx, du/dy] at point x and time t."""
        # NOTE(review): unlike uOfXT, the else-branch relies on a
        # module-global `pi` (no local pi = np.pi here) -- confirm the star
        # imports provide it.
        if manufactured_solution == 1:
            return [np.cos(x[0])*np.sin(x[1]+t),
                    np.sin(x[0])*np.cos(x[1]+t)]
        else:
            return [pi*np.cos(pi*x[0])*np.cos(pi*x[1])*np.sin(t),
                    -pi*np.sin(pi*x[0])*np.sin(pi*x[1])*np.sin(t)]
class vely(object):
    """Exact manufactured y-velocity and its spatial gradient."""
    def __init__(self):
        pass
    def uOfXT(self, x, t):
        """Exact v at point x and time t."""
        pi = np.pi
        if manufactured_solution == 1:
            return np.cos(x[0])*np.cos(x[1]+t)
        else:
            return -np.cos(pi*x[0])*np.sin(pi*x[1])*np.sin(t)
    def duOfXT(self, x, t):
        """Exact [dv/dx, dv/dy] at point x and time t."""
        # NOTE(review): the else-branch relies on a module-global `pi`
        # (no local binding here) -- confirm the star imports provide it.
        if manufactured_solution == 1:
            return [-np.sin(x[0])*np.cos(x[1]+t),
                    -np.cos(x[0])*np.sin(x[1]+t)]
        else:
            return [pi*np.sin(pi*x[0])*np.sin(pi*x[1])*np.sin(t),
                    -pi*np.cos(pi*x[0])*np.cos(pi*x[1])*np.sin(t)]

# Exact velocity components keyed by equation index (0: u, 1: v).
analyticalSolution = {0:velx(),
                      1:vely()}
class pressure(object):
    """Exact manufactured pressure field p(x, y, t) = cos(x) * sin(y + t)."""
    def __init__(self):
        pass
    def uOfXT(self, x, t):
        """Evaluate the exact pressure at point x and time t."""
        return np.cos(x[0]) * np.sin(x[1] + t)

# Exact pressure solution keyed by component index.
analyticalPressureSolution = {0: pressure()}
| mit | -4,424,440,856,964,118,000 | 33.768889 | 133 | 0.448677 | false | 3.027477 | false | false | false |
davidvossel/pcs | pcs/usage.py | 1 | 51366 | import re
# Accumulates example snippets harvested by strip_extras(); printed at the
# end of full_usage().
examples = ""
def full_usage():
    """Print condensed usage for every pcs sub-command, then all examples.

    strip_extras() appends each section's examples to the module-global
    `examples` as a side effect while the output is assembled. `property`
    here is the pcs sub-command function defined later in this file, which
    shadows the builtin.
    """
    global examples
    out = ""
    out += main(False)
    out += strip_extras(resource([],False))
    out += strip_extras(cluster([],False))
    out += strip_extras(stonith([],False))
    out += strip_extras(property([],False))
    out += strip_extras(constraint([],False))
    out += strip_extras(acl([],False))
    out += strip_extras(status([],False))
    out += strip_extras(config([],False))
    out += strip_extras(pcsd([],False))
    print out.strip()
    print "Examples:\n" + examples.replace(" \ ","")
def strip_extras(text):
    """Condense one sub-command's usage text to a one-line-per-command list.

    Returns "<Group>:" followed by each entry of the "Commands:" section
    collapsed onto a single line; as a side effect appends each entry of the
    "Examples:" section to the module-global `examples`.

    NOTE(review): whitespace inside the string literals below appears
    collapsed by the extraction (likely four-space indents originally) --
    verify against upstream pcs before relying on exact behavior.
    """
    global examples
    ret = ""
    # The group name is the third word of "Usage: pcs <group> ...".
    group_name = text.split(" ")[2]
    in_commands = False
    in_examples = False
    lines = text.split("\n")
    minicmd = ""
    ret += group_name.title() + ":\n"
    for line in lines:
        # Skip everything until the "Commands:" heading.
        if not in_commands:
            if line == "Commands:":
                in_commands = True
            continue
        # Switch to example-collection mode at the "Examples:" heading.
        if not in_examples:
            if line == "Examples:":
                in_examples = True
                continue
        if not in_examples and not in_commands:
            continue
        if len(line) >= 4:
            if line[0:4] == " ":
                if line[4:8] != " ":
                    # First line of an entry starts a fresh accumulator.
                    if in_examples:
                        minicmd = line.lstrip() + " "
                    else:
                        minicmd = " " + " " + line.lstrip() + " "
                else:
                    # Deeper-indented continuation line: append to the entry.
                    minicmd += line.lstrip() + " "
            else:
                # Non-indented text while in the commands section: done.
                if in_commands:
                    break
        else:
            # Short/blank line terminates the current entry: flush it.
            if in_examples:
                examples += minicmd + "\n\n"
            else:
                ret += minicmd + "\n"
            minicmd = ""
    return ret
# Print only output for items that match the args
# For now we only look at the first arg
# If no args, then we return the full output
def sub_usage(args, output):
    """Filter *output* down to the section matching args[0].

    Only the first argument is considered; with no args the full text is
    returned unchanged. When a match is found, the usage line (lines[1]) has
    "[commands]" replaced by the matched sub-command.

    NOTE(review): the single spaces in the regex literals below look like
    collapsed multi-space indents from the extraction -- confirm upstream.
    """
    if len(args) == 0:
        return output
    ret = ""
    lines = output.split('\n')
    begin_printing = False
    usage = re.sub("\[commands\]", args[0], lines[1])
    for line in lines:
        # A new top-level entry that is not ours ends the copied section.
        if begin_printing == True and re.match("^ [^ ]",line) and not re.match("^ " + args[0], line):
            begin_printing = False
        # Any non-indented, non-blank line also ends the section.
        if not re.match("^ ",line) and not re.match("^$",line):
            begin_printing = False
        # An indented line starting with the requested word begins copying.
        if re.match("^ " + args[0], line):
            begin_printing = True
        if begin_printing:
            ret += line + "\n"
    if ret != "":
        return "\n" + usage + "\n" + ret.rstrip() + "\n"
    else:
        return output
def dict_depth(d, depth=0):
    """Return the nesting depth of dict *d*.

    A non-dict or an empty dict contributes nothing, so dict_depth({}) == 0
    and dict_depth({"a": {"b": {}}}) == 2. *depth* is the internal recursion
    accumulator.

    Fix: iterate with items() instead of the Python-2-only iteritems(),
    preserving Python 2 behavior while also working on Python 3.
    """
    if not isinstance(d, dict) or not d:
        return depth
    return max(dict_depth(v, depth+1) for k, v in d.items())
def sub_gen_code(level,item,prev_level=[],spaces=""):
out = ""
if dict_depth(item) <= level:
return ""
out += 'case "${cur' + str(level) + '}" in\n'
next_level = []
for key,val in item.items():
if len(val) == 0:
continue
values = " ".join(val.keys())
values = values.replace("|"," ")
out += " " + key + ")\n"
if len(val) > 0 and level != 1:
out += sub_gen_code(level-1,item[key],[] ,spaces + " ")
else:
out += " " + 'COMPREPLY=($(compgen -W "' + values + '" -- ${cur}))\n'
out += " return 0\n"
out += " ;;\n"
out += " *)\n"
out += " ;;\n"
out += 'esac\n'
temp = out.split('\n')
new_out = ""
for l in temp:
new_out += spaces + l + "\n"
return new_out
def sub_generate_bash_completion():
    """Print a bash completion function for pcs on stdout.

    The completion tree is derived from each sub-command's usage text via
    generate_tree(); sub_gen_code() turns it into nested bash `case` blocks
    keyed on the last three completion words (cur1..cur3).
    """
    tree = {}
    tree["resource"] = generate_tree(resource([],False))
    tree["cluster"] = generate_tree(cluster([],False))
    tree["stonith"] = generate_tree(stonith([],False))
    tree["property"] = generate_tree(property([],False))
    tree["acl"] = generate_tree(acl([],False))
    tree["constraint"] = generate_tree(constraint([],False))
    tree["status"] = generate_tree(status([],False))
    tree["config"] = generate_tree(config([],False))
    tree["pcsd"] = generate_tree(pcsd([],False))
    print """
_pcs()
{
local cur cur1 cur2 cur3
COMPREPLY=()
cur="${COMP_WORDS[COMP_CWORD]}"
if [ "$COMP_CWORD" -gt "0" ]; then cur1="${COMP_WORDS[COMP_CWORD-1]}";fi
if [ "$COMP_CWORD" -gt "1" ]; then cur2="${COMP_WORDS[COMP_CWORD-2]}";fi
if [ "$COMP_CWORD" -gt "2" ]; then cur3="${COMP_WORDS[COMP_CWORD-3]}";fi
"""
    print sub_gen_code(3,tree,[])
    print sub_gen_code(2,tree,[])
    print sub_gen_code(1,tree,[])
    print """
if [ $COMP_CWORD -eq 1 ]; then
COMPREPLY=( $(compgen -W "resource cluster stonith property acl constraint status config" -- $cur) )
fi
return 0
}
complete -F _pcs pcs
"""
def generate_tree(usage_txt):
    """Parse a sub-command's usage text into a nested dict of command words.

    Only the section between the "Commands:" and "Examples:" headings is
    scanned. Each indented command line contributes a chain of nested keys,
    stopping at the first placeholder argument (a word starting with '[' or
    '<').

    Fixes: regex pattern is a raw string so '\\w' is no longer a deprecated
    invalid escape in the string literal; the unused cur_stack local is
    removed.
    """
    ignore = True
    ret_hash = {}
    for line in usage_txt.split('\n'):
        if line.startswith("Commands:"):
            ignore = False
            continue
        if line.startswith("Examples:"):
            break
        if ignore:
            continue
        # An indented line starting with a word is a command definition.
        if re.match(r"^ \w", line):
            args = line.split()
            arg = args.pop(0)
            if not arg in ret_hash:
                ret_hash[arg] = {}
            cur_hash = ret_hash[arg]
            for arg in args:
                # Placeholder arguments end the literal command words.
                if arg.startswith('[') or arg.startswith('<'):
                    break
                if not arg in cur_hash:
                    cur_hash[arg] = {}
                cur_hash = cur_hash[arg]
    return ret_hash
def main(pout=True):
    """Top-level pcs usage text: printed when pout is True, returned as a
    string otherwise (used by full_usage()).

    NOTE(review): the column alignment inside the usage string appears to
    have been collapsed to single spaces by the extraction.
    """
    output = """
Usage: pcs [-f file] [-h] [commands]...
Control and configure pacemaker and corosync.
Options:
 -h, --help Display usage and exit
 -f file Perform actions on file instead of active CIB
 --debug Print all network traffic and external commands run
 --version Print pcs version information
Commands:
 cluster Configure cluster options and nodes
 resource Manage cluster resources
 stonith Configure fence devices
 constraint Set resource constraints
 property Set pacemaker properties
 acl Set pacemaker access control lists
 status View cluster status
 config View and manage cluster configuration
 pcsd Manage pcs daemon
"""
    # Advanced usage to possibly add later
    # --corosync_conf=<corosync file> Specify alternative corosync.conf file
    if pout:
        print output
    else:
        return output
def resource(args = [], pout = True):
output = """
Usage: pcs resource [commands]...
Manage pacemaker resources
Commands:
show [resource id] [--full] [--groups]
Show all currently configured resources or if a resource is specified
show the options for the configured resource. If --full is specified
all configured resource options will be displayed. If --groups is
specified, only show groups (and their resources).
list [<standard|provider|type>] [--nodesc]
Show list of all available resources, optionally filtered by specified
type, standard or provider. If --nodesc is used then descriptions
of resources are not printed.
describe <standard:provider:type|type>
Show options for the specified resource
create <resource id> <standard:provider:type|type> [resource options]
[op <operation action> <operation options> [<operation action>
<operation options>]...] [meta <meta options>...]
[--clone <clone options> | --master <master options> |
--group <group name> [--before <resource id> | --after <resource id>]
] [--disabled] [--wait[=n]]
Create specified resource. If --clone is used a clone resource is
created if --master is specified a master/slave resource is created.
If --group is specified the resource is added to the group named. You
can use --before or --after to specify the position of the added
resource relatively to some resource already existing in the group.
If --disabled is specified the resource is not started automatically.
If --wait is specified, pcs will wait up to 'n' seconds for the resource
to start and then return 0 if the resource is started, or 1 if
the resource has not yet started. If 'n' is not specified it defaults
to 60 minutes.
Example: Create a new resource called 'VirtualIP' with IP address
192.168.0.99, netmask of 32, monitored everything 30 seconds,
on eth2.
pcs resource create VirtualIP ocf:heartbeat:IPaddr2 \\
ip=192.168.0.99 cidr_netmask=32 nic=eth2 \\
op monitor interval=30s
delete <resource id|group id|master id|clone id>
Deletes the resource, group, master or clone (and all resources within
the group/master/clone).
enable <resource id> [--wait[=n]]
Allow the cluster to start the resource. Depending on the rest of the
configuration (constraints, options, failures, etc), the resource may
remain stopped. If --wait is specified, pcs will wait up to 'n' seconds
for the resource to start and then return 0 if the resource is started,
or 1 if the resource has not yet started. If 'n' is not specified it
defaults to 60 minutes.
disable <resource id> [--wait[=n]]
Attempt to stop the resource if it is running and forbid the cluster
from starting it again. Depending on the rest of the configuration
(constraints, options, failures, etc), the resource may remain
started. If --wait is specified, pcs will wait up to 'n' seconds for
the resource to stop and then return 0 if the resource is stopped or 1
if the resource has not stopped. If 'n' is not specified it defaults
to 60 minutes.
restart <resource id> [node] [--wait=n]
Restart the resource specified. If a node is specified and if the
resource is a clone or master/slave it will be restarted only on
the node specified. If --wait is specified, then we will wait
up to 'n' seconds for the resource to be restarted and return 0 if
the restart was successful or 1 if it was not.
debug-start <resource id> [--full]
This command will force the specified resource to start on this node
ignoring the cluster recommendations and print the output from
starting the resource. Using --full will give more detailed output.
This is mainly used for debugging resources that fail to start.
debug-stop <resource id> [--full]
This command will force the specified resource to stop on this node
ignoring the cluster recommendations and print the output from
stopping the resource. Using --full will give more detailed output.
This is mainly used for debugging resources that fail to stop.
debug-promote <resource id> [--full]
This command will force the specified resource to be promoted on this
node ignoring the cluster recommendations and print the output from
promoting the resource. Using --full will give more detailed output.
This is mainly used for debugging resources that fail to promote.
debug-demote <resource id> [--full]
This command will force the specified resource to be demoted on this
node ignoring the cluster recommendations and print the output from
demoting the resource. Using --full will give more detailed output.
This is mainly used for debugging resources that fail to demote.
debug-monitor <resource id> [--full]
This command will force the specified resource to be moniored on this
node ignoring the cluster recommendations and print the output from
monitoring the resource. Using --full will give more detailed output.
This is mainly used for debugging resources that fail to be monitored.
move <resource id> [destination node] [--master] [lifetime=<lifetime>]
[--wait[=n]]
Move the resource off the node it is currently running on by creating a
-INFINITY location constraint to ban the node. If destination node is
specified the resource will be moved to that node by creating an
INFINITY location constraint to prefer the destination node. If
--master is used the scope of the command is limited to the master role
and you must use the master id (instead of the resource id). If
lifetime is specified then the constraint will expire after that time,
otherwise it defaults to infinity and the constraint can be cleared
manually with 'pcs resource clear' or 'pcs constraint delete'. If
--wait is specified, pcs will wait up to 'n' seconds for the resource
to move and then return 0 on success or 1 on error. If 'n' is not
specified it defaults to 60 minutes.
If you want the resource to preferably avoid running on some nodes but
be able to failover to them use 'pcs location avoids'.
ban <resource id> [node] [--master] [lifetime=<lifetime>] [--wait[=n]]
Prevent the resource id specified from running on the node (or on the
current node it is running on if no node is specified) by creating a
-INFINITY location constraint. If --master is used the scope of the
command is limited to the master role and you must use the master id
(instead of the resource id). If lifetime is specified then the
constraint will expire after that time, otherwise it defaults to
infinity and the constraint can be cleared manually with 'pcs resource
clear' or 'pcs constraint delete'. If --wait is specified, pcs will
wait up to 'n' seconds for the resource to move and then return 0
on success or 1 on error. If 'n' is not specified it defaults to 60
minutes.
If you want the resource to preferably avoid running on some nodes but
be able to failover to them use 'pcs location avoids'.
clear <resource id> [node] [--master] [--wait[=n]]
Remove constraints created by move and/or ban on the specified
resource (and node if specified).
If --master is used the scope of the command is limited to the
master role and you must use the master id (instead of the resource id).
If --wait is specified, pcs will wait up to 'n' seconds for the
operation to finish (including starting and/or moving resources if
appropriate) and then return 0 on success or 1 on error. If 'n' is not
specified it defaults to 60 minutes.
standards
List available resource agent standards supported by this installation.
(OCF, LSB, etc.)
providers
List available OCF resource agent providers
agents [standard[:provider]]
List available agents optionally filtered by standard and provider
update <resource id> [resource options] [op [<operation action>
<operation options>]...] [meta <meta operations>...] [--wait[=n]]
Add/Change options to specified resource, clone or multi-state
resource. If an operation (op) is specified it will update the first
found operation with the same action on the specified resource, if no
operation with that action exists then a new operation will be created.
(WARNING: all existing options on the updated operation will be reset
if not specified.) If you want to create multiple monitor operations
you should use the 'op add' & 'op remove' commands. If --wait is
specified, pcs will wait up to 'n' seconds for the changes to take
effect and then return 0 if the changes have been processed or 1
otherwise. If 'n' is not specified it defaults to 60 minutes.
op add <resource id> <operation action> [operation properties]
Add operation for specified resource
op remove <resource id> <operation action> [<operation properties>...]
Remove specified operation (note: you must specify the exact operation
properties to properly remove an existing operation).
op remove <operation id>
Remove the specified operation id
op defaults [options]
Set default values for operations, if no options are passed, lists
currently configured defaults
meta <resource id | group id | master id | clone id> <meta options>
[--wait[=n]]
Add specified options to the specified resource, group, master/slave
or clone. Meta options should be in the format of name=value, options
may be removed by setting an option without a value. If --wait is
specified, pcs will wait up to 'n' seconds for the changes to take
effect and then return 0 if the changes have been processed or 1
otherwise. If 'n' is not specified it defaults to 60 minutes.
Example: pcs resource meta TestResource failure-timeout=50 stickiness=
group add <group name> <resource id> [resource id] ... [resource id]
[--before <resource id> | --after <resource id>] [--wait[=n]]
Add the specified resource to the group, creating the group if it does
not exist. If the resource is present in another group it is moved
to the new group. You can use --before or --after to specify
the position of the added resources relatively to some resource already
existing in the group. If --wait is specified, pcs will wait up to 'n'
seconds for the operation to finish (including moving resources if
appropriate) and then return 0 on success or 1 on error. If 'n' is not
specified it defaults to 60 minutes.
group remove <group name> <resource id> [resource id] ... [resource id]
[--wait[=n]]
Remove the specified resource(s) from the group, removing the group if
it no resources remain. If --wait is specified, pcs will wait up to 'n'
seconds for the operation to finish (including moving resources if
appropriate) and then return 0 on success or 1 on error. If 'n' is not
specified it defaults to 60 minutes.
ungroup <group name> [resource id] ... [resource id] [--wait[=n]]
Remove the group (Note: this does not remove any resources from the
cluster) or if resources are specified, remove the specified resources
from the group. If --wait is specified, pcs will wait up to 'n' seconds
for the operation to finish (including moving resources if appropriate)
and the return 0 on success or 1 on error. If 'n' is not specified it
defaults to 60 minutes.
clone <resource id | group id> [clone options]... [--wait[=n]]
Setup up the specified resource or group as a clone. If --wait is
specified, pcs will wait up to 'n' seconds for the operation to finish
(including starting clone instances if appropriate) and then return 0
on success or 1 on error. If 'n' is not specified it defaults to 60
minutes.
unclone <resource id | group name> [--wait[=n]]
Remove the clone which contains the specified group or resource (the
resource or group will not be removed). If --wait is specified, pcs
will wait up to 'n' seconds for the operation to finish (including
stopping clone instances if appropriate) and then return 0 on success
or 1 on error. If 'n' is not specified it defaults to 60 minutes.
master [<master/slave name>] <resource id | group name> [options]
[--wait[=n]]
Configure a resource or group as a multi-state (master/slave) resource.
If --wait is specified, pcs will wait up to 'n' seconds for the operation
to finish (including starting and promoting resource instances if
appropriate) and then return 0 on success or 1 on error. If 'n' is not
specified it defaults to 60 minutes.
Note: to remove a master you must remove the resource/group it contains.
manage <resource id> ... [resource n]
Set resources listed to managed mode (default)
unmanage <resource id> ... [resource n]
Set resources listed to unmanaged mode
defaults [options]
Set default values for resources, if no options are passed, lists
currently configured defaults
cleanup [<resource id>]
Cleans up the resource in the lrmd (useful to reset the resource
status and failcount). This tells the cluster to forget the
operation history of a resource and re-detect its current state.
This can be useful to purge knowledge of past failures that have
since been resolved. If a resource id is not specified then all
resources/stonith devices will be cleaned up.
failcount show <resource id> [node]
Show current failcount for specified resource from all nodes or
only on specified node
failcount reset <resource id> [node]
Reset failcount for specified resource on all nodes or only on
specified node. This tells the cluster to forget how many times
a resource has failed in the past. This may allow the resource to
be started or moved to a more preferred location.
relocate dry-run [resource1] [resource2] ...
The same as 'relocate run' but has no effect on the cluster.
relocate run [resource1] [resource2] ...
Relocate specified resources to their preferred nodes. If no resources
are specified, relocate all resources.
This command calculates the preferred node for each resource while
ignoring resource stickiness. Then it creates location constraints
which will cause the resources to move to their preferred nodes. Once
the resources have been moved the constraints are deleted automatically.
Note that the preferred node is calculated based on current cluster
status, constraints, location of resources and other settings and thus
it might change over time.
relocate show
Display current status of resources and their optimal node ignoring
resource stickiness.
relocate clean
Remove all constraints created by the 'relocate run' command.
Examples:
pcs resource show
Show all resources
pcs resource show VirtualIP
Show options specific to the 'VirtualIP' resource
pcs resource create VirtualIP ocf:heartbeat:IPaddr2 ip=192.168.0.99 \\
cidr_netmask=32 nic=eth2 op monitor interval=30s
Create a new resource called 'VirtualIP' with options
pcs resource create VirtualIP IPaddr2 ip=192.168.0.99 \\
cidr_netmask=32 nic=eth2 op monitor interval=30s
Create a new resource called 'VirtualIP' with options
pcs resource update VirtualIP ip=192.168.0.98 nic=
Change the ip address of VirtualIP and remove the nic option
pcs resource delete VirtualIP
Delete the VirtualIP resource
Notes:
Starting resources on a cluster is (almost) always done by pacemaker and
not directly from pcs. If your resource isn't starting, it's usually
due to either a misconfiguration of the resource (which you debug in
the system log), or constraints preventing the resource from starting or
the resource being disabled. You can use 'pcs resource debug-start' to
test resource configuration, but it should *not* normally be used to start
resources in a cluster.
"""
if pout:
print sub_usage(args, output)
else:
return output
def cluster(args = [], pout = True):
output = """
Usage: pcs cluster [commands]...
Configure cluster for use with pacemaker
Commands:
auth [node] [...] [-u username] [-p password] [--force] [--local]
Authenticate pcs to pcsd on nodes specified, or on all nodes
configured in corosync.conf if no nodes are specified (authorization
tokens are stored in ~/.pcs/tokens or /var/lib/pcsd/tokens for root).
By default all nodes are also authenticated to each other, using
--local only authenticates the local node (and does not authenticate
the remote nodes with each other). Using --force forces
re-authentication to occur.
setup [--start] [--local] [--enable] --name <cluster name> <node1[,node1-altaddr]>
[node2[,node2-altaddr]] [..] [--transport <udpu|udp>] [--rrpmode active|passive]
[--addr0 <addr/net> [[[--mcast0 <address>] [--mcastport0 <port>]
[--ttl0 <ttl>]] | [--broadcast0]]
[--addr1 <addr/net> [[[--mcast1 <address>] [--mcastport1 <port>]
[--ttl1 <ttl>]] | [--broadcast1]]]]
[--wait_for_all=<0|1>] [--auto_tie_breaker=<0|1>]
[--last_man_standing=<0|1> [--last_man_standing_window=<time in ms>]]
[--ipv6] [--token <timeout>] [--token_coefficient <timeout>]
[--join <timeout>] [--consensus <timeout>] [--miss_count_const <count>]
[--fail_recv_const <failures>]
Configure corosync and sync configuration out to listed nodes.
--local will only perform changes on the local node,
--start will also start the cluster on the specified nodes,
--enable will enable corosync and pacemaker on node startup,
--transport allows specification of corosync transport (default: udpu),
--rrpmode allows you to set the RRP mode of the system. Currently only
'passive' is supported or tested (using 'active' is not
recommended).
The --wait_for_all, --auto_tie_breaker, --last_man_standing,
--last_man_standing_window options are all documented in corosync's
votequorum(5) man page.
--ipv6 will configure corosync to use ipv6 (instead of ipv4)
--token <timeout> sets time in milliseconds until a token loss is
declared after not receiving a token (default 1000 ms)
--token_coefficient <timeout> sets time in milliseconds used for clusters
with at least 3 nodes as a coefficient for real token timeout calculation
(token + (number_of_nodes - 2) * token_coefficient) (default 650 ms)
--join <timeout> sets time in milliseconds to wait for join messages
(default 50 ms)
--consensus <timeout> sets time in milliseconds to wait for consensus
to be achieved before starting a new round of membership configuration
(default 1200 ms)
--miss_count_const <count> sets the maximum number of times on
receipt of a token a message is checked for retransmission before
a retransmission occurs (default 5 messages)
--fail_recv_const <failures> specifies how many rotations of the token
without receiving any messages when messages should be received
may occur before a new configuration is formed (default 2500 failures)
Configuring Redundant Ring Protocol (RRP)
When using udpu (the default) specifying nodes, specify the ring 0
address first followed by a ',' and then the ring 1 address.
Example: pcs cluster setup --name cname nodeA-0,nodeA-1 nodeB-0,nodeB-1
When using udp, using --addr0 and --addr1 will allow you to configure
rrp mode for corosync. It's recommended to use a network (instead of
IP address) for --addr0 and --addr1 so the same corosync.conf file can
be used around the cluster. --mcast0 defaults to 239.255.1.1 and
--mcast1 defaults to 239.255.2.1, --mcastport0/1 default to 5405 and
ttl defaults to 1. If --broadcast is specified, --mcast0/1,
--mcastport0/1 & --ttl0/1 are ignored.
start [--all] [node] [...]
Start corosync & pacemaker on specified node(s), if a node is not
specified then corosync & pacemaker are started on the local node.
If --all is specified then corosync & pacemaker are started on all
nodes.
stop [--all] [node] [...]
Stop corosync & pacemaker on specified node(s), if a node is not
specified then corosync & pacemaker are stopped on the local node.
If --all is specified then corosync & pacemaker are stopped on all
nodes.
kill
Force corosync and pacemaker daemons to stop on the local node
(performs kill -9).
enable [--all] [node] [...]
Configure corosync & pacemaker to run on node boot on specified
node(s), if node is not specified then corosync & pacemaker are
enabled on the local node. If --all is specified then corosync &
pacemaker are enabled on all nodes.
disable [--all] [node] [...]
Configure corosync & pacemaker to not run on node boot on specified
node(s), if node is not specified then corosync & pacemaker are
disabled on the local node. If --all is specified then corosync &
pacemaker are disabled on all nodes. (Note: this is the default after
installation)
standby [<node>] | --all
Put specified node into standby mode (the node specified will no longer
be able to host resources), if no node or options are specified the
current node will be put into standby mode, if --all is specified all
nodes will be put into standby mode.
unstandby [<node>] | --all
Remove node from standby mode (the node specified will now be able to
host resources), if no node or options are specified the current node
will be removed from standby mode, if --all is specified all nodes will
be removed from standby mode.
remote-node add <hostname> <resource id> [options]
Enables the specified resource as a remote-node resource on the
specified hostname (hostname should be the same as 'uname -n')
remote-node remove <hostname>
Disables any resources configured to be remote-node resource on the
specified hostname (hostname should be the same as 'uname -n')
status
View current cluster status (an alias of 'pcs status cluster')
pcsd-status [node] [...]
Get current status of pcsd on nodes specified, or on all nodes
configured in corosync.conf if no nodes are specified
sync
Sync corosync configuration to all nodes found from current
corosync.conf file (cluster.conf on systems running Corosync 1.x)
quorum unblock
Cancel waiting for all nodes when establishing quorum. Useful in
situations where you know the cluster is inquorate, but you are
confident that the cluster should proceed with resource management
regardless.
cib [filename] [scope=<scope> | --config]
Get the raw xml from the CIB (Cluster Information Base). If a
filename is provided, we save the cib to that file, otherwise the cib
is printed. Specify scope to get a specific section of the CIB. Valid
values of the scope are: configuration, nodes, resources, constraints,
crm_config, rsc_defaults, op_defaults, status. --config is the same
as scope=configuration. Use of --config is recommended. Do not specify
a scope if you need to get the whole CIB or be warned in the case
of outdated CIB on cib-push.
cib-push <filename> [scope=<scope> | --config]
Push the raw xml from <filename> to the CIB (Cluster Information Base).
Specify scope to push a specific section of the CIB. Valid values
of the scope are: configuration, nodes, resources, constraints,
crm_config, rsc_defaults, op_defaults. --config is the same as
scope=configuration. Use of --config is recommended. Do not specify
a scope if you need to push the whole CIB or be warned in the case
of outdated CIB.
cib-upgrade
Upgrade the cib to the latest version
edit [scope=<scope> | --config]
Edit the cib in the editor specified by the $EDITOR environment
variable and push out any changes upon saving. Specify scope to edit
a specific section of the CIB. Valid values of the scope are:
configuration, nodes, resources, constraints, crm_config, rsc_defaults,
op_defaults. --config is the same as scope=configuration. Use of
--config is recommended. Do not specify a scope if you need to edit
the whole CIB or be warned in the case of outdated CIB.
node add <node[,node-altaddr]> [--start] [--enable]
Add the node to corosync.conf and corosync on all nodes in the cluster
and sync the new corosync.conf to the new node. If --start is specified
also start corosync/pacemaker on the new node, if --enable is specified
enable corosync/pacemaker on new node.
When using Redundant Ring Protocol (RRP) with udpu transport, specify
the ring 0 address first followed by a ',' and then the ring 1 address.
node remove <node>
Shutdown specified node and remove it from pacemaker and corosync on
all other nodes in the cluster
uidgid
List the current configured uids and gids of users allowed to connect
to corosync
uidgid add [uid=<uid>] [gid=<gid>]
Add the specified uid and/or gid to the list of users/groups
allowed to connect to corosync
uidgid rm [uid=<uid>] [gid=<gid>]
Remove the specified uid and/or gid from the list of users/groups
allowed to connect to corosync
corosync [node]
Get the corosync.conf from the specified node or from the current node
if node not specified
reload corosync
Reload the corosync configuration on the current node
destroy [--all]
Permanently destroy the cluster on the current node, killing all
corosync/pacemaker processes removing all cib files and the
corosync.conf file. Using --all will attempt to destroy the
cluster on all nodes configure in the corosync.conf file.
WARNING: This command permantly removes any cluster configuration that
has been created. It is recommended to run 'pcs cluster stop' before
destroying the cluster.
verify [-V] [filename]
Checks the pacemaker configuration (cib) for syntax and common
conceptual errors. If no filename is specified the check is
performmed on the currently running cluster. If -V is used
more verbose output will be printed
report [--from "YYYY-M-D H:M:S" [--to "YYYY-M-D" H:M:S"]] dest
Create a tarball containing everything needed when reporting cluster
problems. If --from and --to are not used, the report will include
the past 24 hours.
"""
if pout:
print sub_usage(args, output)
else:
return output
def stonith(args = [], pout = True):
output = """
Usage: pcs stonith [commands]...
Configure fence devices for use with pacemaker
Commands:
show [stonith id] [--full]
Show all currently configured stonith devices or if a stonith id is
specified show the options for the configured stonith device. If
--full is specified all configured stonith options will be displayed
list [filter] [--nodesc]
Show list of all available stonith agents (if filter is provided then
only stonith agents matching the filter will be shown). If --nodesc is
used then descriptions of stonith agents are not printed.
describe <stonith agent>
Show options for specified stonith agent
create <stonith id> <stonith device type> [stonith device options]
Create stonith device with specified type and options
update <stonith id> [stonith device options]
Add/Change options to specified stonith id
delete <stonith id>
Remove stonith id from configuration
cleanup [<stonith id>]
Cleans up the stonith device in the lrmd (useful to reset the
status and failcount). This tells the cluster to forget the
operation history of a stonith device and re-detect its current state.
This can be useful to purge knowledge of past failures that have
since been resolved. If a stonith id is not specified then all
resources/stonith devices will be cleaned up.
level
Lists all of the fencing levels currently configured
level add <level> <node> <devices>
Add the fencing level for the specified node with a comma separated
list of devices (stonith ids) to attempt for that node at that level.
Fence levels are attempted in numerical order (starting with 1) if
a level succeeds (meaning all devices are successfully fenced in that
level) then no other levels are tried, and the node is considered
fenced.
level remove <level> [node id] [stonith id] ... [stonith id]
Removes the fence level for the level, node and/or devices specified
If no nodes or devices are specified then the fence level is removed
level clear [node|stonith id(s)]
Clears the fence levels on the node (or stonith id) specified or clears
all fence levels if a node/stonith id is not specified. If more than
one stonith id is specified they must be separated by a comma and no
spaces. Example: pcs stonith level clear dev_a,dev_b
level verify
Verifies all fence devices and nodes specified in fence levels exist
fence <node> [--off]
Fence the node specified (if --off is specified, use the 'off' API
call to stonith which will turn the node off instead of rebooting it)
confirm <node>
Confirm that the host specified is currently down.
WARNING: if this node is not actually down data corruption/cluster
failure can occur.
Examples:
pcs stonith create MyStonith fence_virt pcmk_host_list=f1
"""
if pout:
print sub_usage(args, output)
else:
return output
def property(args = [], pout = True):
output = """
Usage: pcs property <properties>...
Configure pacemaker properties
Commands:
list|show [<property> | --all | --defaults]
List property settings (default: lists configured properties).
If --defaults is specified will show all property defaults, if --all
is specified, current configured properties will be shown with unset
properties and their defaults.
Run 'man pengine' and 'man crmd' to get a description of the properties.
set [--force] [--node <nodename>] <property>=[<value>]
Set specific pacemaker properties (if the value is blank then the
property is removed from the configuration). If a property is not
recognized by pcs the property will not be created unless the
--force is used. If --node is used a node attribute is set on
the specified node.
Run 'man pengine' and 'man crmd' to get a description of the properties.
unset [--node <nodename>] <property>
Remove property from configuration (or remove attribute from
specified node if --node is used).
Run 'man pengine' and 'man crmd' to get a description of the properties.
Examples:
pcs property set stonith-enabled=false
"""
if pout:
print sub_usage(args, output)
else:
return output
def constraint(args = [], pout = True):
output = """
Usage: pcs constraint [constraints]...
Manage resource constraints
Commands:
[list|show] --full
List all current location, order and colocation constraints, if --full
is specified also list the constraint ids.
location <resource id> prefers <node[=score]>...
Create a location constraint on a resource to prefer the specified
node and score (default score: INFINITY)
location <resource id> avoids <node[=score]>...
Create a location constraint on a resource to avoid the specified
node and score (default score: INFINITY)
location <resource id> rule [id=<rule id>] [resource-discovery=<option>]
[role=master|slave] [constraint-id=<id>]
[score=<score>|score-attribute=<attribute>] <expression>
Creates a location rule on the specified resource where the expression
looks like one of the following:
defined|not_defined <attribute>
<attribute> lt|gt|lte|gte|eq|ne [string|integer|version] <value>
date gt|lt <date>
date in_range <date> to <date>
date in_range <date> to duration <duration options>...
date-spec <date spec options>...
<expression> and|or <expression>
( <expression> )
where duration options and date spec options are: hours, monthdays,
weekdays, yeardays, months, weeks, years, weekyears, moon.
If score is omitted it defaults to INFINITY. If id is omitted one is
generated from the resource id. If resource-discovery is omitted it
defaults to 'always'.
location show [resources|nodes [node id|resource id]...] [--full]
List all the current location constraints, if 'resources' is specified
location constraints are displayed per resource (default), if 'nodes'
is specified location constraints are displayed per node. If specific
nodes or resources are specified then we only show information about
them. If --full is specified show the internal constraint id's as well.
location add <id> <resource name> <node> <score> [resource-discovery=<option>]
Add a location constraint with the appropriate id, resource name,
node name and score. (For more advanced pacemaker usage)
location remove <id> [<resource name> <node> <score>]
Remove a location constraint with the appropriate id, resource name,
node name and score. (For more advanced pacemaker usage)
order show [--full]
List all current ordering constraints (if --full is specified show
the internal constraint id's as well).
order [action] <resource id> then [action] <resource id> [options]
Add an ordering constraint specifying actions (start, stop, promote,
demote) and if no action is specified the default action will be
start.
Available options are kind=Optional/Mandatory/Serialize,
symmetrical=true/false, require-all=true/false and id=<constraint-id>.
order set <resource1> <resource2> [resourceN]... [options] [set
<resourceX> <resourceY> ... [options]]
[setoptions [constraint_options]]
Create an ordered set of resources.
Available options are sequential=true/false, require-all=true/false,
action=start/promote/demote/stop and role=Stopped/Started/Master/Slave.
Available constraint_options are id=<constraint-id>,
kind=Optional/Mandatory/Serialize and symmetrical=true/false.
order remove <resource1> [resourceN]...
Remove resource from any ordering constraint
colocation show [--full]
List all current colocation constraints (if --full is specified show
the internal constraint id's as well).
colocation add [master|slave] <source resource id> with [master|slave]
<target resource id> [score] [options] [id=constraint-id]
Request <source resource> to run on the same node where pacemaker has
determined <target resource> should run. Positive values of score
mean the resources should be run on the same node, negative values
mean the resources should not be run on the same node. Specifying
'INFINITY' (or '-INFINITY') for the score force <source resource> to
run (or not run) with <target resource>. (score defaults to "INFINITY")
A role can be master or slave (if no role is specified, it defaults to
'started').
colocation set <resource1> <resource2> [resourceN]... [options]
[set <resourceX> <resourceY> ... [options]]
[setoptions [constraint_options]]
Create a colocation constraint with a resource set.
Available options are sequential=true/false, require-all=true/false,
action=start/promote/demote/stop and role=Stopped/Started/Master/Slave.
Available constraint_options are id, score, score-attribute and
score-attribute-mangle.
colocation remove <source resource id> <target resource id>
Remove colocation constraints with <source resource>
remove [constraint id]...
Remove constraint(s) or constraint rules with the specified id(s)
ref <resource>...
List constraints referencing specified resource
rule add <constraint id> [id=<rule id>] [role=master|slave]
[score=<score>|score-attribute=<attribute>] <expression>
Add a rule to a constraint where the expression looks like one of
the following:
defined|not_defined <attribute>
<attribute> lt|gt|lte|gte|eq|ne [string|integer|version] <value>
date gt|lt <date>
date in_range <date> to <date>
date in_range <date> to duration <duration options>...
date-spec <date spec options>...
<expression> and|or <expression>
( <expression> )
where duration options and date spec options are: hours, monthdays,
weekdays, yeardays, months, weeks, years, weekyears, moon
If score is ommited it defaults to INFINITY. If id is ommited one is
generated from the constraint id.
rule remove <rule id>
Remove a rule if a rule id is specified, if rule is last rule in its
constraint, the constraint will be removed
"""
if pout:
print sub_usage(args, output)
else:
return output
def acl(args = [], pout = True):
output = """
Usage: pcs acl [commands]...
View and modify current cluster access control lists
Commands:
[show]
List all current access control lists
enable
Enable access control lists
disable
Disable access control lists
role create <role name> [description=<description>] [((read | write | deny)
(xpath <query> | id <id>))...]
Create a role with the name and (optional) description specified.
Each role can also have an unlimited number of permissions
(read/write/deny) applied to either an xpath query or the id
of a specific element in the cib
role delete <role name>
Delete the role specified and remove it from any users/groups it was
assigned to
role assign <role name> [to] <username/group>
Assign a role to a user or group already created with 'pcs acl
user/group create'
role unassign <role name> [from] <username/group>
Remove a role from the specified user
user create <username> <role name> [<role name>]...
Create an ACL for the user specified and assign roles to the user
user delete <username>
Remove the user specified (and roles assigned will be unassigned for
the specified user)
group create <group> <role name> [<role name>]...
Create an ACL for the group specified and assign roles to the group
group delete <group>
Remove the group specified (and roles assigned will be unassigned for
the specified group)
permission add <role name> ((read | write | deny) (xpath <query> |
id <id>))...
Add the listed permissions to the role specified
permission delete <permission id>
Remove the permission id specified (permission id's are listed in
parenthesis after permissions in 'pcs acl' output)
"""
if pout:
print sub_usage(args, output)
else:
return output
def status(args = [], pout = True):
output = """
Usage: pcs status [commands]...
View current cluster and resource status
Commands:
[status] [--full]
View all information about the cluster and resources (--full provides
more details)
resources
View current status of cluster resources
groups
View currently configured groups and their resources
cluster
View current cluster status
corosync
View current membership information as seen by corosync
nodes [corosync|both|config]
View current status of nodes from pacemaker. If 'corosync' is
specified, print nodes currently configured in corosync, if 'both'
is specified, print nodes from both corosync & pacemaker. If 'config'
is specified, print nodes from corosync & pacemaker configuration.
pcsd <node> ...
Show the current status of pcsd on the specified nodes
xml
View xml version of status (output from crm_mon -r -1 -X)
"""
if pout:
print sub_usage(args, output)
else:
return output
def config(args=[], pout=True):
output = """
Usage: pcs config [commands]...
View and manage cluster configuration
Commands:
[show]
View full cluster configuration
backup [filename]
Creates the tarball containing the cluster configuration files.
If filename is not specified the standard output will be used.
restore [--local] [filename]
Restores the cluster configuration files on all nodes from the backup.
If filename is not specified the standard input will be used.
If --local is specified only the files on the current node will
be restored.
checkpoint
List all available configuration checkpoints.
checkpoint view <checkpoint_number>
Show specified configuration checkpoint.
checkpoint restore <checkpoint_number>
Restore cluster configuration to specified checkpoint.
import-cman output=<filename> [input=<filename>] [--interactive]
[output-format=corosync.conf|cluster.conf]
Converts CMAN cluster configuration to Pacemaker cluster configuration.
Converted configuration will be saved to 'output' file. To send
the configuration to the cluster nodes the 'pcs config restore'
command can be used. If --interactive is specified you will be
prompted to solve incompatibilities manually. If no input is specified
/etc/cluster/cluster.conf will be used. You can force to create output
containing either cluster.conf or corosync.conf using the output-format
option.
"""
if pout:
print sub_usage(args, output)
else:
return output
def pcsd(args=[], pout=True):
output = """
Usage: pcs pcsd [commands]...
Manage pcs daemon
Commands:
certkey <certificate file> <key file>
Load custom certificate and key files for use in pcsd.
sync-certificates
Sync pcsd certificates to all nodes found from current corosync.conf
file (cluster.conf on systems running Corosync 1.x). WARNING: This will
restart pcsd daemon on the nodes.
"""
if pout:
print sub_usage(args, output)
else:
return output
| gpl-2.0 | -8,839,699,227,347,467,000 | 42.715745 | 108 | 0.65226 | false | 4.369343 | true | false | false |
anbangr/trusted-nova | nova/notifier/capacity_notifier.py | 5 | 2731 | # Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import context
from nova import db
from nova import log as logging
LOG = logging.getLogger(__name__)
def notify(message):
"""Look for specific compute manager events and interprete them
so as to keep the Capacity table up to date.
NOTE: the True/False return codes are only for testing.
"""
# The event_type must start with 'compute.instance.'
event_type = message.get('event_type', None)
preamble = 'compute.instance.'
if not event_type or not event_type.startswith(preamble):
return False
# Events we're interested in end with .start and .end
event = event_type[len(preamble):]
parts = event.split('.')
suffix = parts[-1].lower()
event = event[:(-len(suffix) - 1)]
if suffix not in ['start', 'end']:
return False
started = suffix == 'start'
ended = suffix == 'end'
if started and event == 'create':
# We've already updated this stuff in the scheduler. Don't redo the
# work here.
return False
work = 1 if started else -1
# Extract the host name from the publisher id ...
publisher_preamble = 'compute.'
publisher = message.get('publisher_id', None)
if not publisher or not publisher.startswith(publisher_preamble):
return False
host = publisher[len(publisher_preamble):]
# If we deleted an instance, make sure we reclaim the resources.
# We may need to do something explicit for rebuild/migrate.
free_ram_mb = 0
free_disk_gb = 0
vms = 0
if ended and event == 'delete':
vms = -1
payload = message.get('payload', {})
free_ram_mb = payload.get('memory_mb', 0)
free_disk_gb = payload.get('disk_gb', 0)
LOG.debug("EventType=%(event_type)s -> host %(host)s: "
"ram %(free_ram_mb)d, disk %(free_disk_gb)d, "
"work %(work)d, vms%(vms)d" % locals())
db.api.compute_node_utilization_update(context.get_admin_context(), host,
free_ram_mb_delta=free_ram_mb, free_disk_gb_delta=free_disk_gb,
work_delta=work, vm_delta=vms)
return True
| apache-2.0 | -4,728,226,966,872,645,000 | 32.716049 | 78 | 0.652142 | false | 3.857345 | false | false | false |
OpenGrow/OpenGrow | app/views/others.py | 1 | 1060 | # -*- coding: utf-8 -*-
# System Imports
from xml.dom.minidom import parseString
# Flask Imports
from flask import render_template, redirect
from flask import url_for, request, send_from_directory, session
from werkzeug import secure_filename
# Local Imports
from app import app
from app.forms import *
# Package Imports
from decorators import login_required
from models import db
#Dashboard projet (changer de fichier dashboard.py !!)
@app.route('/opengrow/<id>', methods=['GET', 'POST'])
@login_required
def opengrow(id):
print "Project "+str(id)+" selected"
return render_template('opengrow.html', title='OpenGrow', project_id=id)
#Historique photos (changer de fichier dashboard.py !!)
@app.route('/stream', methods=['GET', 'POST'])
@login_required
def stream():
return render_template('camera.html', title='OpenGrow')
#Future page de settings
@app.route('/settings', methods=['GET', 'POST'])
@login_required
def settings():
return render_template('settings.html', title='OpenGrow', settings=globalsettings, form=form, ips=ips)
| gpl-3.0 | -1,451,137,897,622,250,800 | 24.853659 | 106 | 0.729245 | false | 3.521595 | false | false | false |
MasterofNumbers17/slight-fimulator | objects.py | 2 | 19346 | #!/usr/bin/env python
"""The package's classes
Slight Fimulator - Flight simulator in Python
Copyright (C) 2017, 2018 Hao Tian and Adrien Hopkins
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
# Installs Python 3 division and print behaviour
from __future__ import division, print_function
import math
import os
import time
import pygame
PATH = os.path.dirname(os.path.realpath(__file__))
class Airplane(pygame.sprite.Sprite):
"""The class for an airplane sprite.
All units are stored internally in SI base units
"""
NEXT_ID = 0
MAX_SPEED = 500
TERMINAL_VELOCITY = MAX_SPEED / 5 # Why not?
LABELS = "ID:\tX:\tY:\tALT:\tSPD:\tACCEL:\tVSPD:\t\
HDG:\tROLL:\tPITCH:\tPTS:\tDMG:\t"
def __init__(self, x=(0, 0, 0, 0, 0), z=None, width=None,
height=None, altitude=None, player_id=None):
"""Initialize the instance."""
super(Airplane, self).__init__()
if z is None:
x, z, width, height, altitude = x
elif width is None:
altitude = z
x, z, width, height = x
elif height is None:
altitude = width
width, height = z
x, z = x
if player_id is None: # Get an ID for the airplane
self._id = Airplane.NEXT_ID
Airplane.NEXT_ID += 1
else: self._id = player_id
# Initialize private variables
self._pos = [x, z]
self._size = [width, height]
self._altitude = altitude
self._heading = 0
self._pitch = 0
self._speed = 0
self._acceleration = 0
self._gravity = 0
self._throttle = 0
self._roll_level = 0
self._vertical_roll_level = 0
self._autopilot_info = {
'enabled': False,
'conditions': {
'roll-centered': True,
'vertical-roll-centered': True,
'throttle-centered': True
}
}
self._within_objective_range = False
self._points = 0
self._exit_code = 0
self._health = 100
self._time = time.time()
def __repr__(self, show_labels=True):
"""Display some important stats about the plane."""
msg = ("%i\t%i\t%i\t%i\t%.1f\t%.1f\t%.1f\t%.1f\t%.1f\t%.1f\t\
%i\t%.1f\t" % (self.id_, self.x, self.z,
self.altitude, self.speed, self.acceleration,
self.vertical_velocity, self.heading, self.roll,
self.pitch, self.points, 100 - self.health))
if show_labels:
return "%s\n%s" % (Airplane.LABELS, msg)
else:
return msg
## variables
@property
def id_(self):
"""Get the plane's ID."""
return self._id
@property
def pos(self):
"""Get the plane's (x, z) position in metres."""
return self._pos
@pos.setter
def pos(self, new_value):
"""Set the plane's (x, z) position in metres."""
if not isinstance(new_value, (list, tuple)):
raise TypeError("Position must be a list or a tuple.")
if len(new_value) != 2:
raise ValueError("Position must contain two values.")
if not isinstance(new_value[0], (int, float)):
raise ValueError("X must be a number.")
if not isinstance(new_value[1], (int, float)):
raise ValueError("Z must be a number.")
self._pos = new_value
@property
def x(self):
"""Get the plane's x coordinate in metres."""
return self._pos[0]
@x.setter
def x(self, new_value):
"""Set the plane's x coordinate in metres."""
if not isinstance(new_value, (int, float)):
raise ValueError("X must be a number")
self._pos[0] = new_value
@property
def z(self):
"""Get the plane's z coordinate in metres."""
return self._pos[1]
@z.setter
def z(self, new_value):
"""Set the plane's z coordinate in metres."""
if not isinstance(new_value, (int, float)):
raise ValueError("Z must be a number")
self._pos[1] = new_value
@property
def altitude(self):
"""Get the plane's altitude in metres."""
return self._altitude
@altitude.setter
def altitude(self, new_value):
"""Set the plane's altitude in metres."""
if not isinstance(new_value, (int, float)):
raise TypeError("Altitude must be a number.")
self._altitude = new_value
y = altitude
@property
def heading(self):
"""Get the plane's heading in radians."""
return self._heading
@heading.setter
def heading(self, new_value):
"""Set the plane's heading in radians."""
if not isinstance(new_value, (int, float)):
raise TypeError("Heading must be a number.")
new_value %= math.pi * 2
self._heading = new_value
@property
def heading_degrees(self):
"""Get the plane's heading in degrees."""
return math.degrees(self.heading)
@heading_degrees.setter
def heading_degrees(self, new_value):
"""Set the plane's heading in degrees."""
self.heading = math.radians(new_value)
@property
def pitch(self):
"""Get the plane's pitch in radians."""
return self._pitch
@pitch.setter
def pitch(self, new_value):
"""Set the plane's pitch in radians."""
if not isinstance(new_value, (int, float)):
raise TypeError("Pitch must be a number.")
self._pitch = new_value
@property
def pitch_degrees(self):
"""Get the plane's pitch in degrees."""
return math.degrees(self._pitch)
@pitch_degrees.setter
def pitch_degrees(self, new_value):
"""Set the plane's pitch in degrees."""
self.pitch = math.radians(new_value)
@property
def speed(self):
"""Get the plane's speed in m/s."""
return self._speed
@speed.setter
def speed(self, new_value):
"""Set the plane's speed in m/s."""
if not isinstance(new_value, (int, float)):
raise TypeError("Speed must be a number.")
self._speed = new_value
@property
def horizontal_velocity(self):
"""Get the plane's horizontal speed in m/s."""
return self.speed * math.cos(self.pitch)
horizontal_speed = horizontal_velocity
@property
def vertical_velocity(self):
"""Get the plane's vertical speed in m/s."""
return self.speed * math.sin(self.pitch)
@property
def gravity(self):
"""Get the plane's gravity-caused vertical speed drop in m/s."""
return self._gravity
@gravity.setter
def gravity(self, new_value):
"""Set the plane's gravity-caused vertical speed drop in m/s."""
if not isinstance(new_value, (int, float)):
raise TypeError("Gravity must be a number.")
self._gravity = new_value
@property
def total_vertical_velocity(self):
"""Get the plane's total vertical speed in m/s."""
return self.vertical_velocity - self.gravity
@property
def acceleration(self):
"""Get the plane's acceleration in m/s."""
return self._acceleration
@acceleration.setter
def acceleration(self, new_value):
"""Set the plane's acceleration in m/s."""
if not isinstance(new_value, (int, float)):
raise ValueError("Acceleration must be a number")
self._acceleration = new_value
@property
def throttle(self):
"""Get the plane's throttle in m/s."""
return self._throttle
@throttle.setter
def throttle(self, new_value):
"""Set the plane's throttle in m/s."""
if not isinstance(new_value, (int, float)):
raise ValueError("Throttle must be a number")
if new_value < 0:
new_value = 0
elif new_value > 100:
new_value = 100
self._throttle = new_value
@property
def roll(self):
"""Get the plane's horizontal roll in radians."""
return math.radians(self.roll_degrees)
@property
def roll_degrees(self):
"""Get the plane's horizontal roll in degrees."""
return ((35/198) * self._roll_level**3 + (470/99)
* self._roll_level)
@property
def roll_level(self):
"""Get the plane's horizontal roll level."""
return self._roll_level
@roll_level.setter
def roll_level(self, new_value):
"""Set the plane's horizontal roll level."""
if not isinstance(new_value, (int, float)):
raise TypeError("Roll Level must be a number.")
if new_value < -4:
new_value = -4
elif new_value > 4:
new_value = 4
self._roll_level = new_value
@property
def vertical_roll_level(self):
"""Get the plane's vertical roll level."""
return self._vertical_roll_level
@vertical_roll_level.setter
def vertical_roll_level(self, new_value):
"""Set the plane's vertical roll level."""
if not isinstance(new_value, (int, float)):
raise TypeError("Vertical Roll Level must be a number.")
if new_value < -4:
new_value = -4
elif new_value > 4:
new_value = 4
self._vertical_roll_level = new_value
@property
def autopilot_enabled(self):
"""Get the plane's autopilot's status."""
if not self._autopilot_info['enabled']:
return False
else: # See if the autopilot can be disabled
if abs(self.roll_level) < 0.1:
self.roll_level = 0
self._autopilot_info['conditions'][
'roll-centered'] = True
if abs(self.vertical_roll_level) < 0.1:
self.vertical_roll_level = 0
self._autopilot_info['conditions'][
'vertical-roll-centered'] = True
if abs(50 - self.throttle) < 1:
self.throttle = 50
self._autopilot_info['conditions'][
'throttle-centered'] = True
if all(self._autopilot_info['conditions'].values()):
self._autopilot_info['enabled'] = False
return self._autopilot_info['enabled']
@property
def health(self):
"""Get the plane's health."""
return self._health
@health.setter
def health(self, new_value):
"""Set the plane's health."""
if not isinstance(new_value, (int, float)):
raise TypeError("Health must be a number.")
self._health = new_value
@property
def damage(self):
"""Get the plane's damage."""
return 100-self._health
@property
def points(self):
"""Get the plane's score."""
return self._points
@points.setter
def points(self, new_value):
"""Set the plane's score."""
if not isinstance(new_value, (int, float)):
raise TypeError("Score must be a number.")
self._points = new_value
score = points # score is an alias for points.
@property
def image(self):
"""Get the plane's image."""
return self._image
@property
def rect(self):
"""Get the plane's rect."""
return pygame.rect.Rect(self._pos, self._size)
def enable_autopilot(self):
"""Enable the autopilot."""
self._autopilot_info['enabled'] = True
for condition in self._autopilot_info['conditions']:
self._autopilot_info['conditions'][condition] = False
def draw(self, client, airspace):
"""Draw the airplane."""
image = pygame.transform.rotate(
client.scaled_images['navmarker'], -self.heading_degrees)
draw_rect = image.get_rect()
draw_rect.center = (
self.x / airspace.width * client.airspace_rect.width
+ client.airspace_rect.left,
self.z / airspace.height * client.airspace_rect.height
+ client.airspace_rect.top
)
client.screen.blit(image, draw_rect)
def update(self):
"""Update the plane."""
tick_duration = time.time() - self._time
self._time = time.time()
# initialize damage
damage = 0
# stall and gravity
if self.speed <= (self.MAX_SPEED / 5):
max_vert_roll = max((self.speed-(self.MAX_SPEED / 10))
/ (self.MAX_SPEED / 40), 0)
else: max_vert_roll = 4
self.gravity += (((self.MAX_SPEED / 10 - self.speed)
/ self.MAX_SPEED * self.TERMINAL_VELOCITY)
- (self.gravity ** 2
/ (self.TERMINAL_VELOCITY ** 2 / 10)))
if self.gravity < 0:
self.gravity = 0
if self.altitude <= 0.1:
self.gravity = 0
# get heading and pitch
self.heading += (self.roll * tick_duration)
if self.vertical_roll_level > max_vert_roll:
self.vertical_roll_level = max_vert_roll
self.pitch_degrees = self.vertical_roll_level * 10
# acceleration
self.acceleration = (self.throttle**2 / 250
- self.speed**2 * 40 / self.MAX_SPEED**2)
self.speed += (self.acceleration * tick_duration)
# move plane
hspeed = self.horizontal_speed * tick_duration
vspeed = self.total_vertical_velocity * tick_duration
self.x += math.sin(self.heading) * hspeed
self.z -= math.cos(self.heading) * hspeed
self.altitude += vspeed
if self.altitude < 0.1:
self.altitude = 0
# overspeed damage
if self.speed > self.MAX_SPEED * 0.75:
damage += ((self.speed - self.MAX_SPEED*0.75) ** 2
/ (self.MAX_SPEED**2*10) * tick_duration)
if self._throttle > 75:
damage += (self._throttle - 75) ** 2 / 1000 * tick_duration
# autopilot
if self.autopilot_enabled:
self.roll_level *= (0.5 ** tick_duration)
self.vertical_roll_level *= (0.5 ** tick_duration)
self._throttle = 50 + (self.throttle-50) * (
0.5 ** tick_duration)
# deal damage
self.health -= damage
# Function that approximates the 5, 10, 20, 30
# roll of Slight Fimulator 1.0
get_roll = lambda s, r: (35/198) * r**3 + (470/99) * r
get_pitch = lambda s, r: 10*r
class Objective(pygame.sprite.Sprite):
"""The class for an objective sprite."""
NEXT_ID = 0
LABELS = "ID:\tX:\tY:\tALT:\t"
def __init__(self, x=(0, 0, 0, 0, 0), z=None, width=None,
height=None, altitude=None, obj_id=None):
"""Initialize the instance."""
super(Objective, self).__init__()
if z is None:
x, z, width, height, altitude = x
elif width is None:
altitude = z
x, z, width, height = x
elif height is None:
altitude = width
width, height = z
x, z = x
if obj_id is None: # Get an ID for the objective
self._id = Objective.NEXT_ID
Objective.NEXT_ID += 1
else: self._id = obj_id
# Initialize private variables
self._pos = [x, z]
self._size = [width, height]
self._altitude = altitude
def __repr__(self, show_labels=True):
"""Display some important stats about the objective."""
msg = "%i\t%i\t%i\t%i\t" % (self.id_, self.x, self.z,
self.altitude)
if show_labels:
return "%s\n%s" % (self.labels(), msg)
else:
return msg
@property
def id_(self):
"""Get the objective's ID."""
return self._id
@property
def pos(self):
"""Get the objective's (x, z) position in metres."""
return self._pos
@pos.setter
def pos(self, new_value):
"""Set the objective's (x, z) position in metres."""
if not isinstance(new_value, (list, tuple)):
raise TypeError("Position must be a list or a tuple.")
if len(new_value) != 2:
raise ValueError("Position must contain two values.")
if not isinstance(new_value[0], (int, float)):
raise ValueError("X must be a number.")
if not isinstance(new_value[1], (int, float)):
raise ValueError("Z must be a number.")
self._pos = new_value
@property
def x(self):
"""Get the objective's x coordinate in metres."""
return self._pos[0]
@x.setter
def x(self, new_value):
"""Set the objective's x coordinate in metres."""
if not isinstance(new_value, (int, float)):
raise ValueError("X must be a number")
self._pos[0] = new_value
@property
def z(self):
"""Get the objective's z coordinate in metres."""
return self._pos[1]
@z.setter
def z(self, new_value):
"""Set the objective's z coordinate in metres."""
if not isinstance(new_value, (int, float)):
raise ValueError("Z must be a number")
self._pos[1] = new_value
@property
def altitude(self):
"""Get the objective's altitude in metres."""
return self._altitude
@altitude.setter
def altitude(self, new_value):
"""Set the objective's altitude in metres."""
if not isinstance(new_value, (int, float)):
raise TypeError("Altitude must be a number.")
self._altitude = new_value
y = altitude
@property
def image(self):
"""Get the objective's image."""
return self._image
@property
def rect(self):
"""Get the plane's rect."""
return pygame.rect.Rect(self._pos, self._size)
def draw(self, client, airspace):
"""Draw the objective."""
draw_rect = client.scaled_images['objectivemarker'].get_rect()
draw_rect.center = (
self.x / airspace.width * client.airspace_rect.width
+ client.airspace_rect.left,
self.z / airspace.height * client.airspace_rect.height
+ client.airspace_rect.top
)
client.screen.blit(
client.scaled_images['objectivemarker'], draw_rect)
class AdvancedSpriteGroup(pygame.sprite.Group):
"""A Pygame sprite group, except you can index it."""
def __init__(self, *args, **kw):
"""Initialize the instance."""
super(AdvancedSpriteGroup, self).__init__(*args, **kw)
def __getitem__(self, key):
"""Get the sprite at key."""
for sprite in self:
if sprite.id_ == key:
return sprite
raise KeyError("Item {} not found.".format(key))
| gpl-3.0 | 8,012,471,856,915,746,000 | 34.5625 | 72 | 0.564148 | false | 3.935313 | false | false | false |
pankajp/pyface | examples/tasks/advanced/i_python_editor.py | 5 | 1868 | #------------------------------------------------------------------------------
# Copyright (c) 2005, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: Enthought, Inc.
# Description: <Enthought pyface package component>
#------------------------------------------------------------------------------
""" A widget for editing Python code. """
# Enthought library imports.
from traits.api import Bool, Event, Instance, File, Interface, Unicode
from pyface.tasks.i_editor import IEditor
# Local imports.
from pyface.key_pressed_event import KeyPressedEvent
class IPythonEditor(IEditor):
""" A widget for editing Python code. """
#### 'IPythonEditor' interface ############################################
# Object being editor is a file
obj = Instance(File)
# The pathname of the file being edited.
path = Unicode
# Should line numbers be shown in the margin?
show_line_numbers = Bool(True)
#### Events ####
# The contents of the editor has changed.
changed = Event
# A key has been pressed.
key_pressed = Event(KeyPressedEvent)
###########################################################################
# 'IPythonEditor' interface.
###########################################################################
def load(self, path=None):
""" Loads the contents of the editor. """
def save(self, path=None):
""" Saves the contents of the editor. """
def select_line(self, lineno):
""" Selects the specified line. """
| bsd-3-clause | -6,642,441,349,226,259,000 | 31.206897 | 79 | 0.558887 | false | 4.729114 | false | false | false |
watchdogpolska/feder | feder/letters/logs/migrations/0002_auto_20170820_1447.py | 1 | 1032 | # Generated by Django 1.11.4 on 2017-08-20 14:47
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("logs", "0001_initial")]
operations = [
migrations.AddField(
model_name="emaillog",
name="status",
field=models.CharField(
choices=[
(b"open", "Open"),
(b"ok", "Open"),
(b"spambounce", "Open"),
(b"softbounce", "Open"),
(b"hardbounce", "Open"),
(b"dropped", "Open"),
(b"deferred", "Deferred"),
(b"unknown", "Unknown"),
],
default=b"unknown",
max_length=20,
),
),
migrations.AddField(
model_name="emaillog",
name="to",
field=models.CharField(default="", max_length=255, verbose_name="To"),
preserve_default=False,
),
]
| mit | -8,732,192,234,525,978,000 | 28.485714 | 82 | 0.440891 | false | 4.566372 | false | false | false |
pulsar-chem/Pulsar-Core | test/math/TestMathSet.py | 1 | 4763 | import pulsar as psr
def run_test():
tester=psr.PyTester("Testing MathSet and MathSet Iterator C++ Interface")
U1=psr.DoubleUniverse([1.0,2.0,3.0])
U2, U3=psr.DoubleUniverse([3.0,4.0,5.0]),psr.DoubleUniverse([3.0])
U4=psr.DoubleUniverse([9.0,51.0,100.0])
#Constructors
S1=psr.DoubleSet(U1,{2});S2=psr.DoubleSet(S1)
tester.test_equal("Constructor 1 and copy constructor work",S1,S2)
U1.insert(4.0);
tester.test_equal("Shallow copy of universe",S1,S2)
S3,S4=psr.DoubleSet(U2,True),psr.DoubleSet(U2,False)
tester.test_return("Constructor 2",True,False,S3.__eq__,S4)
B2,B3=psr.DoubleSet(S1,True),psr.DoubleSet(U1,True)
tester.test_return("Copy and fill works",True,True,B2.__eq__,B3)
S5=psr.DoubleSet(U2,{0,1,2})
tester.test_equal("Fill constructor works",S3,S5);
S8=psr.DoubleSet(S2.clone())
U1.insert(5.0)
tester.test_return("Clone is not aliased",True,False,S8.__eq__,S2)
#Access and manipulation
tester.test_return("Get universe works",True,U1,S1.get_universe)
tester.test_return("As universe",True,U3,S1.as_universe)
tester.test_return("Size works",True,1,S8.size)
tester.test_return("count element false",True,False,S2.count,15.0)
tester.test_return("count element true",True,True,S3.count,3.0)
tester.test_return("count index false",True,False,S2.count_idx,15)
tester.test_return("count index true",True,True,S3.count_idx,0)
vals=[3.0,4.0,5.0]
itercheck=[i for i in S5]
tester.test_equal("iterators work",vals,itercheck)
tester.test_return("idx valid",True,0,S3.idx,3.0)
tester.test_call("idx invalid",False,S8.idx,55.0)
S11=psr.DoubleSet(U1,{2,3})
tester.test_return("insert by valid elem",True,S11,S1.insert,4.0)
tester.test_call("insert by invalid elem",False,S1.insert,55.0)
tester.test_return("insert by valid index",True,S11,S2.insert_idx,3)
tester.test_call("insert by invalid index",False,S2.insert_idx,99)
S9=psr.DoubleSet(U1,{1,2,3})
S10=psr.DoubleSet(U1,{1,2,4})
S12=psr.DoubleSet(U1,{1,2})
S13=psr.DoubleSet(U1,{3})
S99=psr.DoubleSet(U4,{1})
tester.test_return("union",True,S9,S1.set_union,S12)
tester.test_call("union fail",False,S1.set_union,S99)
tester.test_return("union assign",True,S9,S1.union_assign,S12)
tester.test_call("union assign fail",False,S1.union_assign,S99)
tester.test_return("intersection",True,S12,S1.intersection,S10)
tester.test_call("intersection fail",False,S1.intersection,S99)
tester.test_return("intersection assign",True,S12,S1.intersection_assign,S10)
tester.test_call("intersection assign fail",False,S1.intersection_assign,S99)
tester.test_return("difference",True,S13,S2.difference,S12)
tester.test_call("difference fail",False,S2.difference,S99)
tester.test_return("difference assign",True,S13,S2.difference_assign,S12)
tester.test_call("difference assign fail",False,S2.difference_assign,S99)
S14=psr.DoubleSet(U1,True)
S14-=S2
tester.test_return("complement",True,S14,S2.complement);
#Set comparisons
tester.test_return("subset equal",True,True,S2.is_subset_of,S13)
tester.test_return("subset true",True,True,S2.is_subset_of,S9)
tester.test_return("subset false",True,False,S9.is_subset_of,S2)
tester.test_return("proper subset equal",True,False,S2.is_proper_subset_of,S13)
tester.test_return("proper subset true",True,True,S2.is_proper_subset_of,S9)
tester.test_return("proper subset false",True,False,S9.is_proper_subset_of,S2)
tester.test_return("superset equal",True,True,S2.is_superset_of,S13)
tester.test_return("superset true",True,True,S9.is_superset_of,S2)
tester.test_return("superset false",True,False,S2.is_superset_of,S9)
tester.test_return("proper superset equal",True,False,S2.is_proper_superset_of,S13)
tester.test_return("proper superset true",True,True,S9.is_proper_superset_of,S2)
tester.test_return("proper superset false",True,False,S2.is_proper_superset_of,S9)
tester.test_return("not equal",True,True,S2.__ne__,S14)
#Manipulations
transresults=[4.0,6.0]
def transfxn(in_val):
return 2.0*in_val
NewS=S1.transform(transfxn)
tresults2=[i for i in NewS]
tester.test_equal("transform works",transresults,tresults2)
def partfxn(in_val):
return in_val==2.0
NewS2=S1.partition(partfxn)
partresults=[2.0]
presults2=[i for i in NewS2]
tester.test_equal("partition works",partresults,presults2)
tester.test_return("hash works check 1",True,S2.my_hash(),S13.my_hash)
S2.clear()
tester.test_return("clear works",True,0,S2.size)
tester.print_results()
return tester.nfailed()
| bsd-3-clause | 4,405,880,014,321,268,000 | 46.63 | 87 | 0.69557 | false | 2.737356 | true | false | false |
Yukarumya/Yukarum-Redfoxes | ipc/ipdl/test/cxx/genIPDLUnitTests.py | 1 | 4210 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import string, sys
def usage():
print >>sys.stderr, """
%s template_file -t unit_tests... -e extra_protocols...
TEMPLATE_FILE is used to generate to generate the unit-tester .cpp
UNIT_TESTS are the top-level protocols defining unit tests
EXTRA_PROTOCOLS are top-level protocols for subprocesses that can be
spawned in tests but are not unit tests in and of
themselves
"""% (sys.argv[0])
sys.exit(1)
def main(argv):
template = argv[1]
if argv[2] != '-t': usage()
i = 3
unittests = []
while argv[i] != '-e':
unittests.append(argv[i])
i += 1
extras = argv[(i+1):]
includes = '\n'.join([
'#include "%s.h"'% (t) for t in unittests ])
enum_values = '\n'.join([
' %s,'% (t) for t in unittests+extras ])
last_enum = unittests[-1]
string_to_enums = '\n'.join([
''' else if (!strcmp(aString, "%s"))
return %s;'''% (t, t) for t in unittests+extras ])
enum_to_strings = '\n'.join([
''' case %s:
return "%s";'''%(t, t) for t in unittests+extras ])
parent_delete_cases = '\n'.join([
''' case %s: {
delete reinterpret_cast<%sParent*>(gParentActor);
return;
}
'''% (t, t) for t in unittests ])
parent_enabled_cases_proc = '\n'.join([
''' case %s: {
if (!%sParent::RunTestInProcesses()) {
passed("N/A to proc");
DeferredParentShutdown();
return;
}
break;
}
''' % (t, t) for t in unittests ])
parent_main_cases_proc = '\n'.join([
''' case %s: {
%sParent** parent =
reinterpret_cast<%sParent**>(&gParentActor);
*parent = new %sParent();
(*parent)->Open(transport, child);
return (*parent)->Main();
}
'''% (t, t, t, t) for t in unittests ])
parent_enabled_cases_thread = '\n'.join([
''' case %s: {
if (!%sParent::RunTestInThreads()) {
passed("N/A to threads");
DeferredParentShutdown();
return;
}
break;
}
''' % (t, t) for t in unittests ])
parent_main_cases_thread = '\n'.join([
''' case %s: {
%sParent** parent =
reinterpret_cast<%sParent**>(&gParentActor);
*parent = new %sParent();
%sChild** child =
reinterpret_cast<%sChild**>(&gChildActor);
*child = new %sChild();
::mozilla::ipc::MessageChannel *childChannel = (*child)->GetIPCChannel();
::mozilla::ipc::Side parentSide =
::mozilla::ipc::ParentSide;
(*parent)->Open(childChannel, childMessageLoop, parentSide);
return (*parent)->Main();
}
'''% (t, t, t, t, t, t, t) for t in unittests ])
child_delete_cases = '\n'.join([
''' case %s: {
delete reinterpret_cast<%sChild*>(gChildActor);
return;
}
'''% (t, t) for t in unittests+extras ])
child_init_cases = '\n'.join([
''' case %s: {
%sChild** child =
reinterpret_cast<%sChild**>(&gChildActor);
*child = new %sChild();
(*child)->Open(transport, parentPid, worker);
return;
}
'''% (t, t, t, t) for t in unittests+extras ])
templatefile = open(template, 'r')
sys.stdout.write(
string.Template(templatefile.read()).substitute(
INCLUDES=includes,
ENUM_VALUES=enum_values, LAST_ENUM=last_enum,
STRING_TO_ENUMS=string_to_enums,
ENUM_TO_STRINGS=enum_to_strings,
PARENT_DELETE_CASES=parent_delete_cases,
PARENT_ENABLED_CASES_PROC=parent_enabled_cases_proc,
PARENT_MAIN_CASES_PROC=parent_main_cases_proc,
PARENT_ENABLED_CASES_THREAD=parent_enabled_cases_thread,
PARENT_MAIN_CASES_THREAD=parent_main_cases_thread,
CHILD_DELETE_CASES=child_delete_cases,
CHILD_INIT_CASES=child_init_cases))
templatefile.close()
if __name__ == '__main__':
main(sys.argv)
| mpl-2.0 | -1,677,121,679,776,948,700 | 28.858156 | 81 | 0.547268 | false | 3.459326 | true | false | false |
kartoza/geonode | geonode/catalogue/backends/pycsw_local.py | 3 | 6624 | # -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import os
from lxml import etree
from django.conf import settings
from ConfigParser import SafeConfigParser
from owslib.iso import MD_Metadata
from pycsw import server
from geonode.catalogue.backends.generic import CatalogueBackend as GenericCatalogueBackend
from geonode.catalogue.backends.generic import METADATA_FORMATS
from shapely.geometry.base import ReadingError
true_value = 'true'
if settings.DATABASES['default']['ENGINE'].endswith(('sqlite', 'sqlite3', 'spatialite',)):
true_value = '1'
# pycsw settings that the user shouldn't have to worry about
CONFIGURATION = {
'server': {
'home': '.',
'url': settings.CATALOGUE['default']['URL'],
'encoding': 'UTF-8',
'language': settings.LANGUAGE_CODE,
'maxrecords': '10',
# 'loglevel': 'DEBUG',
# 'logfile': '/tmp/pycsw.log',
# 'federatedcatalogues': 'http://geo.data.gov/geoportal/csw/discovery',
# 'pretty_print': 'true',
# 'domainquerytype': 'range',
'domaincounts': 'true',
'profiles': 'apiso,ebrim',
},
'repository': {
'source': 'geonode',
'filter': 'is_published = %s' % true_value,
'mappings': os.path.join(os.path.dirname(__file__), 'pycsw_local_mappings.py')
}
}
class CatalogueBackend(GenericCatalogueBackend):
def __init__(self, *args, **kwargs):
super(CatalogueBackend, self).__init__(*args, **kwargs)
self.catalogue.formats = ['Atom', 'DIF', 'Dublin Core', 'ebRIM', 'FGDC', 'ISO']
self.catalogue.local = True
def remove_record(self, uuid):
pass
def create_record(self, item):
pass
def get_record(self, uuid):
results = self._csw_local_dispatch(identifier=uuid)
if len(results) < 1:
return None
result = etree.fromstring(results).find('{http://www.isotc211.org/2005/gmd}MD_Metadata')
if result is None:
return None
record = MD_Metadata(result)
record.keywords = []
if hasattr(record, 'identification') and hasattr(record.identification, 'keywords'):
for kw in record.identification.keywords:
record.keywords.extend(kw['keywords'])
record.links = {}
record.links['metadata'] = self.catalogue.urls_for_uuid(uuid)
record.links['download'] = self.catalogue.extract_links(record)
return record
def search_records(self, keywords, start, limit, bbox):
with self.catalogue:
lresults = self._csw_local_dispatch(keywords, keywords, start+1, limit, bbox)
# serialize XML
e = etree.fromstring(lresults)
self.catalogue.records = \
[MD_Metadata(x) for x in e.findall('//{http://www.isotc211.org/2005/gmd}MD_Metadata')]
# build results into JSON for API
results = [self.catalogue.metadatarecord2dict(doc) for v, doc in self.catalogue.records.iteritems()]
result = {'rows': results,
'total': e.find('{http://www.opengis.net/cat/csw/2.0.2}SearchResults').attrib.get(
'numberOfRecordsMatched'),
'next_page': e.find('{http://www.opengis.net/cat/csw/2.0.2}SearchResults').attrib.get(
'nextRecord')
}
return result
def _csw_local_dispatch(self, keywords=None, start=0, limit=10, bbox=None, identifier=None):
"""
HTTP-less CSW
"""
# serialize pycsw settings into SafeConfigParser
# object for interaction with pycsw
mdict = dict(settings.PYCSW['CONFIGURATION'], **CONFIGURATION)
if 'server' in settings.PYCSW['CONFIGURATION']:
# override server system defaults with user specified directives
mdict['server'].update(settings.PYCSW['CONFIGURATION']['server'])
config = SafeConfigParser()
for section, options in mdict.iteritems():
config.add_section(section)
for option, value in options.iteritems():
config.set(section, option, value)
# fake HTTP environment variable
os.environ['QUERY_STRING'] = ''
# init pycsw
csw = server.Csw(config, version='2.0.2')
# fake HTTP method
csw.requesttype = 'GET'
# fake HTTP request parameters
if identifier is None: # it's a GetRecords request
formats = []
for f in self.catalogue.formats:
formats.append(METADATA_FORMATS[f][0])
csw.kvp = {
'service': 'CSW',
'version': '2.0.2',
'elementsetname': 'full',
'typenames': formats,
'resulttype': 'results',
'constraintlanguage': 'CQL_TEXT',
'outputschema': 'http://www.isotc211.org/2005/gmd',
'constraint': None,
'startposition': start,
'maxrecords': limit
}
response = csw.getrecords()
else: # it's a GetRecordById request
csw.kvp = {
'service': 'CSW',
'version': '2.0.2',
'request': 'GetRecordById',
'id': identifier,
'outputschema': 'http://www.isotc211.org/2005/gmd',
}
# FIXME(Ariel): Remove this try/except block when pycsw deals with
# empty geometry fields better.
# https://gist.github.com/ingenieroariel/717bb720a201030e9b3a
try:
response = csw.dispatch()
except ReadingError:
return []
if isinstance(response, list): # pycsw 2.0+
response = response[1]
return response
| gpl-3.0 | 5,982,822,532,873,200,000 | 36.213483 | 112 | 0.578653 | false | 4.119403 | true | false | false |
BoGoEngine/bogo-win | interfaces/gen/_6A160507_C2CC_4C37_A435_B4D645642BDD_0_0_0.py | 1 | 15081 | # -*- coding: mbcs -*-
typelib_path = 'd:\\bogo\\bogo-win32\\interfaces\\tsf.tlb'
_lcid = 0 # change this if required
from ctypes import *
from comtypes import GUID
from comtypes import IUnknown
from comtypes import GUID
from ctypes import HRESULT
from comtypes import BSTR
from ctypes.wintypes import HKL
from comtypes import helpstring
from comtypes import COMMETHOD
from comtypes import dispid
TfGuidAtom = c_ulong
from comtypes import CoClass
UINT_PTR = c_ulong
class ITfInputProcessorProfiles(IUnknown):
_case_insensitive_ = True
_iid_ = GUID('{1F02B6C5-7842-4EE6-8A0B-9A24183A95CA}')
_idlflags_ = []
class IEnumGUID(IUnknown):
_case_insensitive_ = True
_iid_ = GUID('{0002E000-0000-0000-C000-000000000046}')
_idlflags_ = []
class IEnumTfLanguageProfiles(IUnknown):
_case_insensitive_ = True
_iid_ = GUID('{3D61BF11-AC5F-42C8-A4CB-931BCC28C744}')
_idlflags_ = []
def __iter__(self):
return self
def next(self):
item, fetched = self.Next(1)
if fetched:
return item
raise StopIteration
def __getitem__(self, index):
self.Reset()
self.Skip(index)
item, fetched = self.Next(1)
if fetched:
return item
raise IndexError(index)
ITfInputProcessorProfiles._methods_ = [
COMMETHOD([], HRESULT, 'Register',
( ['in'], POINTER(GUID), 'rclsid' )),
COMMETHOD([], HRESULT, 'Unregister',
( ['in'], POINTER(GUID), 'rclsid' )),
COMMETHOD([], HRESULT, 'AddLanguageProfile',
( ['in'], POINTER(GUID), 'rclsid' ),
( ['in'], c_ushort, 'langid' ),
( ['in'], POINTER(GUID), 'guidProfile' ),
( ['in'], POINTER(c_ushort), 'pchDesc' ),
( ['in'], c_ulong, 'cchDesc' ),
( ['in'], POINTER(c_ushort), 'pchIconFile' ),
( ['in'], c_ulong, 'cchFile' ),
( ['in'], c_ulong, 'uIconIndex' )),
COMMETHOD([], HRESULT, 'RemoveLanguageProfile',
( ['in'], POINTER(GUID), 'rclsid' ),
( ['in'], c_ushort, 'langid' ),
( ['in'], POINTER(GUID), 'guidProfile' )),
COMMETHOD([], HRESULT, 'EnumInputProcessorInfo',
( ['out'], POINTER(POINTER(IEnumGUID)), 'ppenum' )),
COMMETHOD([], HRESULT, 'GetDefaultLanguageProfile',
( ['in'], c_ushort, 'langid' ),
( ['in'], POINTER(GUID), 'catid' ),
( ['out'], POINTER(GUID), 'pclsid' ),
( ['out'], POINTER(GUID), 'pguidProfile' )),
COMMETHOD([], HRESULT, 'SetDefaultLanguageProfile',
( ['in'], c_ushort, 'langid' ),
( ['in'], POINTER(GUID), 'rclsid' ),
( ['in'], POINTER(GUID), 'guidProfiles' )),
COMMETHOD([], HRESULT, 'ActivateLanguageProfile',
( ['in'], POINTER(GUID), 'rclsid' ),
( ['in'], c_ushort, 'langid' ),
( ['in'], POINTER(GUID), 'guidProfiles' )),
COMMETHOD([], HRESULT, 'GetActiveLanguageProfile',
( ['in'], POINTER(GUID), 'rclsid' ),
( ['out'], POINTER(c_ushort), 'plangid' ),
( ['out'], POINTER(GUID), 'pguidProfile' )),
COMMETHOD([], HRESULT, 'GetLanguageProfileDescription',
( ['in'], POINTER(GUID), 'rclsid' ),
( ['in'], c_ushort, 'langid' ),
( ['in'], POINTER(GUID), 'guidProfile' ),
( ['out'], POINTER(BSTR), 'pbstrProfile' )),
COMMETHOD([], HRESULT, 'GetCurrentLanguage',
( ['out'], POINTER(c_ushort), 'plangid' )),
COMMETHOD([], HRESULT, 'ChangeCurrentLanguage',
( ['in'], c_ushort, 'langid' )),
COMMETHOD([], HRESULT, 'GetLanguageList',
( ['out'], POINTER(POINTER(c_ushort)), 'ppLangId' ),
( ['out'], POINTER(c_ulong), 'pulCount' )),
COMMETHOD([], HRESULT, 'EnumLanguageProfiles',
( ['in'], c_ushort, 'langid' ),
( ['out'], POINTER(POINTER(IEnumTfLanguageProfiles)), 'ppenum' )),
COMMETHOD([], HRESULT, 'EnableLanguageProfile',
( ['in'], POINTER(GUID), 'rclsid' ),
( ['in'], c_ushort, 'langid' ),
( ['in'], POINTER(GUID), 'guidProfile' ),
( ['in'], c_int, 'fEnable' )),
COMMETHOD([], HRESULT, 'IsEnabledLanguageProfile',
( ['in'], POINTER(GUID), 'rclsid' ),
( ['in'], c_ushort, 'langid' ),
( ['in'], POINTER(GUID), 'guidProfile' ),
( ['out'], POINTER(c_int), 'pfEnable' )),
COMMETHOD([], HRESULT, 'EnableLanguageProfileByDefault',
( ['in'], POINTER(GUID), 'rclsid' ),
( ['in'], c_ushort, 'langid' ),
( ['in'], POINTER(GUID), 'guidProfile' ),
( ['in'], c_int, 'fEnable' )),
COMMETHOD([], HRESULT, 'SubstituteKeyboardLayout',
( ['in'], POINTER(GUID), 'rclsid' ),
( ['in'], c_ushort, 'langid' ),
( ['in'], POINTER(GUID), 'guidProfile' ),
( ['in'], HKL, 'HKL' )),
]
################################################################
## code template for ITfInputProcessorProfiles implementation
##class ITfInputProcessorProfiles_Impl(object):
## def EnumInputProcessorInfo(self):
## '-no docstring-'
## #return ppenum
##
## def EnumLanguageProfiles(self, langid):
## '-no docstring-'
## #return ppenum
##
## def GetDefaultLanguageProfile(self, langid, catid):
## '-no docstring-'
## #return pclsid, pguidProfile
##
## def Unregister(self, rclsid):
## '-no docstring-'
## #return
##
## def GetLanguageList(self):
## '-no docstring-'
## #return ppLangId, pulCount
##
## def GetCurrentLanguage(self):
## '-no docstring-'
## #return plangid
##
## def Register(self, rclsid):
## '-no docstring-'
## #return
##
## def ActivateLanguageProfile(self, rclsid, langid, guidProfiles):
## '-no docstring-'
## #return
##
## def RemoveLanguageProfile(self, rclsid, langid, guidProfile):
## '-no docstring-'
## #return
##
## def AddLanguageProfile(self, rclsid, langid, guidProfile, pchDesc, cchDesc, pchIconFile, cchFile, uIconIndex):
## '-no docstring-'
## #return
##
## def EnableLanguageProfile(self, rclsid, langid, guidProfile, fEnable):
## '-no docstring-'
## #return
##
## def ChangeCurrentLanguage(self, langid):
## '-no docstring-'
## #return
##
## def SubstituteKeyboardLayout(self, rclsid, langid, guidProfile, HKL):
## '-no docstring-'
## #return
##
## def IsEnabledLanguageProfile(self, rclsid, langid, guidProfile):
## '-no docstring-'
## #return pfEnable
##
## def GetLanguageProfileDescription(self, rclsid, langid, guidProfile):
## '-no docstring-'
## #return pbstrProfile
##
## def GetActiveLanguageProfile(self, rclsid):
## '-no docstring-'
## #return plangid, pguidProfile
##
## def SetDefaultLanguageProfile(self, langid, rclsid, guidProfiles):
## '-no docstring-'
## #return
##
## def EnableLanguageProfileByDefault(self, rclsid, langid, guidProfile, fEnable):
## '-no docstring-'
## #return
##
# comtypes-generated COM interface for the TSF (Text Services Framework)
# category manager.  Machine-generated from the type library; do not edit
# by hand.
class ITfCategoryMgr(IUnknown):
    _case_insensitive_ = True
    _iid_ = GUID('{C3ACEFB5-F69D-4905-938F-FCADCF4BE830}')
    _idlflags_ = []
# The method table is assigned after the class statement so the interface
# type already exists when the COMMETHOD descriptors reference it.
ITfCategoryMgr._methods_ = [
    COMMETHOD([], HRESULT, 'RegisterCategory',
              ( ['in'], POINTER(GUID), 'rclsid' ),
              ( ['in'], POINTER(GUID), 'rcatid' ),
              ( ['in'], POINTER(GUID), 'rguid' )),
    COMMETHOD([], HRESULT, 'UnregisterCategory',
              ( ['in'], POINTER(GUID), 'rclsid' ),
              ( ['in'], POINTER(GUID), 'rcatid' ),
              ( ['in'], POINTER(GUID), 'rguid' )),
    COMMETHOD([], HRESULT, 'EnumCategoriesInItem',
              ( ['in'], POINTER(GUID), 'rguid' ),
              ( ['out'], POINTER(POINTER(IEnumGUID)), 'ppenum' )),
    COMMETHOD([], HRESULT, 'EnumItemsInCategory',
              ( ['in'], POINTER(GUID), 'rcatid' ),
              ( ['out'], POINTER(POINTER(IEnumGUID)), 'ppenum' )),
    COMMETHOD([], HRESULT, 'FindClosestCategory',
              ( ['in'], POINTER(GUID), 'rguid' ),
              ( ['out'], POINTER(GUID), 'pcatid' ),
              ( ['in'], POINTER(POINTER(GUID)), 'ppcatidList' ),
              ( ['in'], c_ulong, 'ulCount' )),
    COMMETHOD([], HRESULT, 'RegisterGUIDDescription',
              ( ['in'], POINTER(GUID), 'rclsid' ),
              ( ['in'], POINTER(GUID), 'rguid' ),
              ( ['in'], POINTER(c_ushort), 'pchDesc' ),
              ( ['in'], c_ulong, 'cch' )),
    COMMETHOD([], HRESULT, 'UnregisterGUIDDescription',
              ( ['in'], POINTER(GUID), 'rclsid' ),
              ( ['in'], POINTER(GUID), 'rguid' )),
    COMMETHOD([], HRESULT, 'GetGUIDDescription',
              ( ['in'], POINTER(GUID), 'rguid' ),
              ( ['out'], POINTER(BSTR), 'pbstrDesc' )),
    COMMETHOD([], HRESULT, 'RegisterGUIDDWORD',
              ( ['in'], POINTER(GUID), 'rclsid' ),
              ( ['in'], POINTER(GUID), 'rguid' ),
              ( ['in'], c_ulong, 'dw' )),
    COMMETHOD([], HRESULT, 'UnregisterGUIDDWORD',
              ( ['in'], POINTER(GUID), 'rclsid' ),
              ( ['in'], POINTER(GUID), 'rguid' )),
    COMMETHOD([], HRESULT, 'GetGUIDDWORD',
              ( ['in'], POINTER(GUID), 'rguid' ),
              ( ['out'], POINTER(c_ulong), 'pdw' )),
    COMMETHOD([], HRESULT, 'RegisterGUID',
              ( ['in'], POINTER(GUID), 'rguid' ),
              ( ['out'], POINTER(TfGuidAtom), 'pguidatom' )),
    COMMETHOD([], HRESULT, 'GetGUID',
              ( ['in'], TfGuidAtom, 'guidatom' ),
              ( ['out'], POINTER(GUID), 'pguid' )),
    COMMETHOD([], HRESULT, 'IsEqualTfGuidAtom',
              ( ['in'], TfGuidAtom, 'guidatom' ),
              ( ['in'], POINTER(GUID), 'rguid' ),
              ( ['out'], POINTER(c_int), 'pfEqual' )),
    ]
################################################################
## code template for ITfCategoryMgr implementation
##class ITfCategoryMgr_Impl(object):
## def RegisterGUIDDescription(self, rclsid, rguid, pchDesc, cch):
## '-no docstring-'
## #return
##
## def IsEqualTfGuidAtom(self, guidatom, rguid):
## '-no docstring-'
## #return pfEqual
##
## def GetGUIDDescription(self, rguid):
## '-no docstring-'
## #return pbstrDesc
##
## def RegisterCategory(self, rclsid, rcatid, rguid):
## '-no docstring-'
## #return
##
## def UnregisterGUIDDescription(self, rclsid, rguid):
## '-no docstring-'
## #return
##
## def FindClosestCategory(self, rguid, ppcatidList, ulCount):
## '-no docstring-'
## #return pcatid
##
## def GetGUIDDWORD(self, rguid):
## '-no docstring-'
## #return pdw
##
## def UnregisterGUIDDWORD(self, rclsid, rguid):
## '-no docstring-'
## #return
##
## def RegisterGUIDDWORD(self, rclsid, rguid, dw):
## '-no docstring-'
## #return
##
## def RegisterGUID(self, rguid):
## '-no docstring-'
## #return pguidatom
##
## def UnregisterCategory(self, rclsid, rcatid, rguid):
## '-no docstring-'
## #return
##
## def EnumCategoriesInItem(self, rguid):
## '-no docstring-'
## #return ppenum
##
## def GetGUID(self, guidatom):
## '-no docstring-'
## #return pguid
##
## def EnumItemsInCategory(self, rcatid):
## '-no docstring-'
## #return ppenum
##
# comtypes-generated coclass implementing both TSF interfaces above.
# Machine-generated; do not edit by hand.
class FakeClass(CoClass):
    _reg_clsid_ = GUID('{DEC2C382-120C-4D57-BEDA-9C15678C863F}')
    _idlflags_ = []
    _typelib_path_ = typelib_path
    _reg_typelib_ = ('{6A160507-C2CC-4C37-A435-B4D645642BDD}', 0, 0)
FakeClass._com_interfaces_ = [ITfInputProcessorProfiles, ITfCategoryMgr]
# GUID-shaped MIDL struct (Data1..Data4 mirror the Windows GUID layout).
class __MIDL___MIDL_itf_tsf_0006_0001_0001(Structure):
    pass
__MIDL___MIDL_itf_tsf_0006_0001_0001._fields_ = [
    ('Data1', c_ulong),
    ('Data2', c_ushort),
    ('Data3', c_ushort),
    ('Data4', c_ubyte * 8),
]
# Sanity-check that the ctypes layout matches the MIDL-declared layout.
assert sizeof(__MIDL___MIDL_itf_tsf_0006_0001_0001) == 16, sizeof(__MIDL___MIDL_itf_tsf_0006_0001_0001)
assert alignment(__MIDL___MIDL_itf_tsf_0006_0001_0001) == 4, alignment(__MIDL___MIDL_itf_tsf_0006_0001_0001)
# comtypes-generated method table for the standard IEnumGUID enumerator.
IEnumGUID._methods_ = [
    COMMETHOD([], HRESULT, 'RemoteNext',
              ( ['in'], c_ulong, 'celt' ),
              ( ['out'], POINTER(GUID), 'rgelt' ),
              ( ['out'], POINTER(c_ulong), 'pceltFetched' )),
    COMMETHOD([], HRESULT, 'Skip',
              ( ['in'], c_ulong, 'celt' )),
    COMMETHOD([], HRESULT, 'Reset'),
    COMMETHOD([], HRESULT, 'Clone',
              ( ['out'], POINTER(POINTER(IEnumGUID)), 'ppenum' )),
    ]
################################################################
## code template for IEnumGUID implementation
##class IEnumGUID_Impl(object):
## def Reset(self):
## '-no docstring-'
## #return
##
## def Skip(self, celt):
## '-no docstring-'
## #return
##
## def Clone(self):
## '-no docstring-'
## #return ppenum
##
## def RemoteNext(self, celt):
## '-no docstring-'
## #return rgelt, pceltFetched
##
# comtypes-generated record type; its fields are assigned further below.
class TF_LANGUAGEPROFILE(Structure):
    _recordinfo_ = ('{6A160507-C2CC-4C37-A435-B4D645642BDD}', 0, 0, 0L, '{E1B5808D-1E46-4C19-84DC-68C5F5978CC8}')
# Enumerator over TF_LANGUAGEPROFILE records.
IEnumTfLanguageProfiles._methods_ = [
    COMMETHOD([], HRESULT, 'Clone',
              ( ['out'], POINTER(POINTER(IEnumTfLanguageProfiles)), 'ppenum' )),
    COMMETHOD([], HRESULT, 'Next',
              ( ['in'], c_ulong, 'ulCount' ),
              ( ['out'], POINTER(TF_LANGUAGEPROFILE), 'pProfile' ),
              ( ['out'], POINTER(c_ulong), 'pcFetch' )),
    COMMETHOD([], HRESULT, 'Reset'),
    COMMETHOD([], HRESULT, 'Skip',
              ( ['in'], c_ulong, 'ulCount' )),
    ]
################################################################
## code template for IEnumTfLanguageProfiles implementation
##class IEnumTfLanguageProfiles_Impl(object):
## def Reset(self):
## '-no docstring-'
## #return
##
## def Skip(self, ulCount):
## '-no docstring-'
## #return
##
## def Clone(self):
## '-no docstring-'
## #return ppenum
##
## def Next(self, ulCount):
## '-no docstring-'
## #return pProfile, pcFetch
##
# Field layout of TF_LANGUAGEPROFILE (declared earlier in this module).
TF_LANGUAGEPROFILE._fields_ = [
    ('clsid', GUID),
    ('langid', c_ushort),
    ('catid', GUID),
    ('fActive', c_int),
    ('guidProfile', GUID),
]
# Sanity-check that the ctypes layout matches the MIDL-declared layout.
assert sizeof(TF_LANGUAGEPROFILE) == 56, sizeof(TF_LANGUAGEPROFILE)
assert alignment(TF_LANGUAGEPROFILE) == 4, alignment(TF_LANGUAGEPROFILE)
# Type-library descriptor for this generated module.
class Library(object):
    name = u'TSF'
    _reg_typelib_ = ('{6A160507-C2CC-4C37-A435-B4D645642BDD}', 0, 0)
# Public API of this generated module.
__all__ = ['ITfInputProcessorProfiles', 'FakeClass',
           'IEnumTfLanguageProfiles', 'TfGuidAtom',
           'TF_LANGUAGEPROFILE', 'UINT_PTR', 'IEnumGUID',
           'ITfCategoryMgr', '__MIDL___MIDL_itf_tsf_0006_0001_0001']
from comtypes import _check_version; _check_version('501')
| gpl-3.0 | 4,124,624,088,685,188,600 | 35.693431 | 116 | 0.531861 | false | 3.302891 | false | false | false |
husman/WoTrack | apps/wordtrack/lyrics_track.py | 1 | 4034 | import string
import nltk
from nltk.collocations import *
from nltk.tokenize import word_tokenize
from apps.wordtrack.levenshtein_reduce import LevenshteinReduce
from spotify.utils.spotify_client import SpotifyClient
class LyricsTrack(object):
    """Converts input lyrics into a list of Spotify tracks whose titles
    spell out the lyrics.

    The lyrics are repeatedly split into collocation ngrams (trigrams,
    then bigrams, then single words); each ngram phrase is matched against
    Spotify search results by Levenshtein distance, and acceptably-matched
    phrases are removed from the remaining lyrics until nothing is left.
    """

    def __init__(self, lyrics):
        """
        :param lyrics: (str) lyrics
        """
        self.lyrics = lyrics.lower()
        self.original_lyrics = self.lyrics
        self.spotify_client = SpotifyClient()
        # Ngrams produced by the previous pass; used to detect lack of
        # progress between recursive passes.
        self.last_ngrams = []
        # Maximum Levenshtein distance for a track title to count as a
        # match; loosened when a pass makes no progress.
        self.acceptable_levenshtein = 3
        self.acquired_tracks = []
        self.ngram_degree = 3

    def get_tracks_for_ngram(self, ngrams):
        """Search Spotify once per ngram phrase.

        :param ngrams: (list) phrases used as search queries
        :return: list of {'phrase': ..., 'tracks': ...} dicts
        """
        return [
            {
                'phrase': ngram,
                'tracks': self.spotify_client.get_tracks(ngram),
            } for ngram in ngrams
        ]

    def convert_phrase_to_track(self, ngram_tracks, lyrics):
        """Pick the closest track per phrase and consume matched phrases.

        :param ngram_tracks: (list) output of get_tracks_for_ngram
        :param lyrics: (str) remaining lyrics
        :return: lyrics with every acceptably-matched phrase removed
        """
        phrase_to_tracks = []
        for ngram_track in ngram_tracks:
            phrase_to_tracks.append(LevenshteinReduce(
                phrase=ngram_track['phrase'],
                tracks=ngram_track['tracks']
            ).get_most_similar_track())

        for track in phrase_to_tracks:
            if track and track['levenshtein'] <= self.acceptable_levenshtein:
                self.acquired_tracks.append(track)
                lyrics = lyrics.replace(track['phrase'], '').strip()
        return lyrics

    def process(self, ngram_degree=3):
        """Process the lyrics into Spotify tracks, recursing until the
        remaining lyrics are empty.

        :param ngram_degree: (int) the greatest degree of ngrams to use.
        """
        self.ngram_degree = ngram_degree
        self._remove_punctuations()

        if ngram_degree == 3:
            ngrams = self._get_trigrams_with_collocation_pmi_for_lyrics()
        elif ngram_degree == 2:
            ngrams = self._get_bigrams_with_collocation_pmi_for_lyrics()
        else:
            ngrams = self.lyrics.split(' ')

        # BUG FIX: remember the previous pass's ngrams *before* overwriting
        # self.last_ngrams.  The original compared after the assignment, so
        # the "no progress" test below was always true and the Levenshtein
        # threshold was loosened on every single pass.
        previous_ngrams = self.last_ngrams
        self.last_ngrams = ngrams

        ngram_tracks = self.get_tracks_for_ngram(ngrams)
        self.lyrics = self.convert_phrase_to_track(ngram_tracks, self.lyrics)

        if self.lyrics.strip() != '':
            # Only loosen the matching threshold when this pass made no
            # progress (same number of ngrams as the previous pass).
            if len(previous_ngrams) == len(ngrams):
                self.acceptable_levenshtein += 1
            self.ngram_degree -= 1
            self.process(self.ngram_degree)

    def get_tracks(self):
        """
        :return: (list) the tracks best matching the lyrics.
        """
        return self.acquired_tracks

    def _get_bigrams_with_collocation_pmi_for_lyrics(self):
        # Rank bigrams by pointwise mutual information so strongly
        # collocated word pairs are searched first.
        bigram_measures = nltk.collocations.BigramAssocMeasures()
        finder = BigramCollocationFinder.from_words(word_tokenize(self.lyrics))
        bi_phraseme = finder.score_ngrams(bigram_measures.pmi)
        phrasemes = ["%s %s" % (phrase[0][0], phrase[0][1]) for phrase in bi_phraseme]
        return phrasemes

    def _get_trigrams_with_collocation_pmi_for_lyrics(self):
        # Same as the bigram variant, for three-word phrases.
        trigram_measures = nltk.collocations.TrigramAssocMeasures()
        finder = TrigramCollocationFinder.from_words(word_tokenize(self.lyrics))
        tri_phraseme = finder.score_ngrams(trigram_measures.pmi)
        phrasemes = ["%s %s %s" % (phrase[0][0], phrase[0][1], phrase[0][2]) for phrase in tri_phraseme]
        return phrasemes

    def _remove_punctuations(self):
        # Strip all ASCII punctuation so phrase matching works on bare words.
        for c in string.punctuation:
            self.lyrics = self.lyrics.replace(c, '')
| mit | -120,597,018,416,039,790 | 35.672727 | 104 | 0.612543 | false | 3.637511 | false | false | false |
shanot/imp | modules/display/test/test_colormap.py | 2 | 4345 | from __future__ import print_function
import IMP
import IMP.test
import IMP.core
import IMP.display
import io
import re
class Tests(IMP.test.TestCase):
    """Unit tests for IMP.display Color and the built-in color maps."""

    def assertColorEqual(self, c, red, green, blue, delta=1e-6):
        # Compare each channel of an IMP.display.Color within `delta`.
        self.assertAlmostEqual(c.get_red(), red, delta=delta)
        self.assertAlmostEqual(c.get_green(), green, delta=delta)
        self.assertAlmostEqual(c.get_blue(), blue, delta=delta)

    def test_color(self):
        """Test Color class"""
        c = IMP.display.Color()
        # Default-constructed color is the sentinel (-1, -1, -1).
        self.assertColorEqual(c, -1.0, -1.0, -1.0)
        c = IMP.display.Color(0.1, 0.2, 0.3)
        self.assertColorEqual(c, 0.1, 0.2, 0.3)
        c.show()
        # Each channel outside [0, 1] must raise a usage exception.
        for bad in range(3):
            rgb = [0.5, 0.5, 0.5]
            rgb[bad] = -1.0
            self.assertRaisesUsageException(IMP.display.Color, *rgb)
            rgb[bad] = 2.0
            self.assertRaisesUsageException(IMP.display.Color, *rgb)

    def test_get_interpolated_rgb(self):
        """Test get_interpolated_rgb()"""
        a = IMP.display.Color(0.1, 0.2, 0.3)
        b = IMP.display.Color(0.4, 0.9, 0.8)
        # c == a when f=0
        c = IMP.display.get_interpolated_rgb(a, b, 0.)
        self.assertColorEqual(c, 0.1, 0.2, 0.3)
        # c == b when f=1
        c = IMP.display.get_interpolated_rgb(a, b, 1.)
        self.assertColorEqual(c, 0.4, 0.9, 0.8)
        c = IMP.display.get_interpolated_rgb(a, b, 0.4)
        self.assertColorEqual(c, 0.22, 0.48, 0.5)

    def test_get_linear_color_map_value(self):
        """Test get_linear_color_map_value()"""
        self.assertAlmostEqual(IMP.display.get_linear_color_map_value(
            10, 40, 30), 0.66, delta=0.1)
        # Values outside [min, max] clamp to 1.0 / 0.0.
        self.assertAlmostEqual(IMP.display.get_linear_color_map_value(
            10, 40, 50), 1.0, delta=0.1)
        self.assertAlmostEqual(IMP.display.get_linear_color_map_value(
            10, 40, -50), 0.0, delta=0.1)
        self.assertRaisesUsageException(
            IMP.display.get_linear_color_map_value, 100, 50, 70)

    def test_get_display_color(self):
        """Test get_display_color()"""
        self.assertColorEqual(IMP.display.get_display_color(0),
                              166./255., 206./255., 227./255.)
        self.assertColorEqual(IMP.display.get_display_color(105),
                              253./255., 191./255., 111./255.)

    def test_get_jet_color(self):
        """Test the jet color map"""
        self.assertColorEqual(IMP.display.get_jet_color(0.), 0., 0., 1.)
        self.assertColorEqual(IMP.display.get_jet_color(1.), 0., 0., 1.)
        self.assertColorEqual(IMP.display.get_jet_color(0.5), 1., 0.5, 0.)
        # Some rounding error over 1.0 should be OK
        self.assertColorEqual(IMP.display.get_jet_color(1.0001), 0., 0., 1.)
        # Check out of range condition
        self.assertRaisesUsageException(IMP.display.get_jet_color, -1.0)
        self.assertRaisesUsageException(IMP.display.get_jet_color, 1.1)

    def test_get_rgb_color(self):
        """Test the rgb color map"""
        self.assertColorEqual(IMP.display.get_rgb_color(0.), 0., 0., 1.)
        self.assertColorEqual(IMP.display.get_rgb_color(1.), 1., 0., 0.)
        self.assertColorEqual(IMP.display.get_rgb_color(0.5), 0., 1., 0.)

    def test_get_hot_color(self):
        """Test the hot color map"""
        self.assertColorEqual(IMP.display.get_hot_color(0.), 0., 0., 0.)
        self.assertColorEqual(IMP.display.get_hot_color(1.), 1., 1., 1.)
        self.assertColorEqual(IMP.display.get_hot_color(0.5), 1., 0.5, 0.)

    def test_get_gray_color(self):
        """Test the gray color map"""
        self.assertColorEqual(IMP.display.get_gray_color(0.), 0., 0., 0.)
        self.assertColorEqual(IMP.display.get_gray_color(1.), 1., 1., 1.)
        self.assertColorEqual(IMP.display.get_gray_color(0.5), 0.5, 0.5, 0.5)

    def test_get_gnuplot_color(self):
        """Test the gnuplot color map"""
        self.assertColorEqual(IMP.display.get_gnuplot_color(0.), 0., 0., 0.)
        self.assertColorEqual(IMP.display.get_gnuplot_color(1.), 1., 1., 0.)
        self.assertColorEqual(IMP.display.get_gnuplot_color(0.5),
                              0.675, 0.125, 0.3)
# Run via the IMP test harness when executed directly.
if __name__ == '__main__':
    IMP.test.main()
| gpl-3.0 | -8,221,678,464,184,445,000 | 41.598039 | 78 | 0.576525 | false | 3.16691 | true | false | false |
heihachi/PokemonGo-Bot | pokemongo_bot/test/spin_fort_test.py | 4 | 2151 | import os
import pickle
import unittest
from mock import MagicMock, patch
from pokemongo_bot.cell_workers.spin_fort import SpinFort
from pokemongo_bot.inventory import Items
# Worker configuration shared by the tests: no spin delays, high daily cap.
config = {
    "spin_wait_min": 0,
    "spin_wait_max": 0,
    "daily_spin_limit": 100,
}
# Canned FORT_SEARCH API response: a successful spin (result == 1)
# awarding 50 XP and three Pokeballs (item_id 1).
response_dict = {'responses':
    {'FORT_SEARCH': {
        'experience_awarded': 50,
        'items_awarded': [
            {'item_id': 1, 'item_count': 1},
            {'item_id': 1, 'item_count': 1},
            {'item_id': 1, 'item_count': 1}
        ],
        'result': 1,
        'cooldown_complete_timestamp_ms': 1474592183629L,
        'chain_hack_sequence_number': 1}
    },
    'status_code': 1,
    'platform_returns': [
        {'type': 6, 'response': 'CAE='}
    ],
    'request_id': 4916374460149268503L
}
# Expected rewards corresponding to response_dict above.
items_awarded = {u'Pokeball': 4}
egg_awarded = None
experience_awarded = 50
class SpinFortTestCase(unittest.TestCase):
    """Tests for the SpinFort cell worker, with the bot, fort lookup and
    inventory patched out."""

    def setUp(self):
        # Patch the whole bot so no API/auth work happens.
        self.patcherPokemonGoBot = patch('pokemongo_bot.PokemonGoBot')
        self.bot = self.patcherPokemonGoBot.start()

        # Load a pickled list of example forts as the in-range forts.
        forts_path = os.path.join(os.path.dirname(__file__),
                                  'resources', 'example_forts.pickle')
        with open(forts_path, 'rb') as forts:
            ex_forts = pickle.load(forts)

        self.patcherFortRange = patch('pokemongo_bot.cell_workers.spin_fort.SpinFort.get_forts_in_range')
        self.fort_range = self.patcherFortRange.start()
        self.fort_range.return_value = ex_forts

        self.patcherInventoryItem = patch('pokemongo_bot.inventory.Items')
        self.inventory_item = self.patcherInventoryItem.start()

    def tearDown(self):
        # Stop every patcher started in setUp.
        self.patcherPokemonGoBot.stop()
        self.patcherFortRange.stop()
        self.patcherInventoryItem.stop()

    # @patch('pokemongo_bot.cell_workers.spin_fort.SpinFort.get_items_awarded_from_fort_spinned')
    # def test_spin_fort(self, items_awarded):
    #     spin_fort = SpinFort(self.bot, config)
    #     self.bot.api = MagicMock()
    #     self.bot.api.fort_search.return_value = response_dict
    #     items_awarded.return_value = items_awarded
    #     result = spin_fort.work()
    #     self.assertEqual(result, 1)
| mit | 7,088,395,926,541,534,000 | 30.632353 | 105 | 0.635053 | false | 3.153959 | false | false | false |
V-FEXrt/Pokemon-Spoof-Plus | Pokemon/pokemon_team.py | 1 | 3569 | import random
from Utilities.text_converter import *
from pokemon import Pokemon
from pokemon_species import Species
def pokemon_type_block_encode(pokemon):
    """Encode up to six Pokemon as the 8-byte species block of a trade:
    one count byte, six species slots (0xFF when empty), one 0xFF
    terminator."""
    count = len(pokemon)
    if count > 6:
        raise ValueError("Cannot have more than 6 Pokemon")
    block = [count]
    block.extend(member.species.hex for member in pokemon)
    block.extend([0xFF] * (6 - count))
    block.append(0xFF)
    return block
def pokemon_type_block_decode(bytes):
    """Decode a species block produced by pokemon_type_block_encode.

    Returns [count, [Species, ...]] with one Species per occupied slot."""
    count = bytes[0]
    decoded = [Species.fromBytes(byte) for byte in bytes[1:count + 1]]
    return [count, decoded]
def trainer_name_encode(name):
    """Encode a trainer name as an 11-byte terminated, zero-padded buffer."""
    if len(name) > 7:
        raise ValueError("Name cannot be longer than 7 characters")
    terminated = terminate(encode(name))
    return padTo(terminated, 0x00, 11)
def trainer_name_decode(bytes):
if len(bytes) is not 11:
print "Warning trainer name data should be 11 bytes"
return decode(unterminate(removePad(bytes, 0)))
def extend(bytes, arr):
    """Append every element of arr to the list `bytes`, in place."""
    bytes += arr
class PokemonTeam():
    """A trainer name plus up to six Pokemon, (de)serialisable to a fixed
    link-trade byte layout (see toBytes/fromBytes)."""

    def __init__(self, name, pokemon):
        self.name = name
        self.pokemon = pokemon
        if len(name) > 7:
            raise ValueError("Name cannot be longer than 7 characters")
        if len(pokemon) > 6:
            raise ValueError("Cannot have more than 6 Pokemon")

    def __str__(self):
        out = "Trainer: " + self.name + "\n"
        for p in self.pokemon:
            out += p.__str__() + "\n"
        return out

    def trade_pokemon(self, idx, pokemon):
        # Store a copy (via a byte round-trip) so the traded Pokemon is
        # decoupled from the caller's instance.
        self.pokemon[idx] = Pokemon.fromBytes(pokemon.toBytes())

    def toBytes(self):
        """Serialise the team: 11-byte trainer name, 8-byte species block,
        then six fixed-size slots each for Pokemon data (44 bytes),
        original trainer names (11 bytes) and nicknames (11 bytes)."""
        dataBlock = []
        extend(dataBlock, trainer_name_encode(self.name))
        extend(dataBlock, pokemon_type_block_encode(self.pokemon))
        length = len(self.pokemon)
        # Six fixed 44-byte Pokemon data slots.
        for i in range(6):
            if (i < length):
                extend(dataBlock, self.pokemon[i].toBytes())
            else:
                # Fill with 0 bytes
                extend(dataBlock, padTo([], 0x00, 44))
        # Six fixed 11-byte original-trainer-name slots.
        for i in range(6):
            if (i < length):
                extend(dataBlock, trainer_name_encode(self.pokemon[i].originalTrainerName))
            else:
                # Fill with 0 bytes
                extend(dataBlock, padTo([], 0x00, 11))
        # Six fixed 11-byte nickname slots.
        for i in range(6):
            if (i < length):
                extend(dataBlock, self.pokemon[i].terminatedNickname())
            else:
                # Fill with 0 bytes
                extend(dataBlock, padTo([], 0x00, 11))
        return dataBlock

    @staticmethod
    def fromBytes(bytes):
        """Inverse of toBytes: rebuild a PokemonTeam from its byte layout."""
        trainer_name = trainer_name_decode(bytes[0:11])
        meta = pokemon_type_block_decode(bytes[11:19])
        pokemon = []
        byte_idx = 19
        for i in range(meta[0]):
            pokemon.append(Pokemon.fromBytes(bytes[byte_idx:byte_idx+44]))
            byte_idx += 44
        # 283 = 11 + 8 + 6 * 44: start of the original-trainer-name slots.
        byte_idx = 283
        for i in range(meta[0]):
            pokemon[i].originalTrainerName = trainer_name_decode(bytes[byte_idx:byte_idx+11])
            byte_idx += 11
        # 349 = 283 + 6 * 11: start of the nickname slots.
        byte_idx = 349
        for i in range(meta[0]):
            pokemon[i].setNickname(bytes[byte_idx:byte_idx+11])
            byte_idx += 11
        return PokemonTeam(trainer_name, pokemon)

    @staticmethod
    def rnd():
        # Random team of 4-6 random Pokemon under a fixed trainer name.
        pkmn_cnt = random.randint(1, 3) + 3
        pkmn = []
        for i in range(pkmn_cnt):
            pkmn.append(Pokemon.rnd())
        return PokemonTeam("HACKER", pkmn)
| mit | -6,775,644,329,612,869,000 | 26.244275 | 93 | 0.568507 | false | 3.656762 | false | false | false |
jamesgk/ufo2fdk | Lib/ufo2ft/filters/flattenComponents.py | 2 | 1865 | from fontTools.misc.transform import Transform
from ufo2ft.filters import BaseFilter
import logging
logger = logging.getLogger(__name__)
class FlattenComponentsFilter(BaseFilter):
    """ufo2ft filter that replaces nested components with direct references
    to the deepest base glyphs, composing the transformations along the
    way."""

    def __call__(self, font, glyphSet=None):
        # BaseFilter drives the per-glyph filtering; report how many
        # composite glyphs were actually flattened.
        if super(FlattenComponentsFilter, self).__call__(font, glyphSet):
            modified = self.context.modified
            if modified:
                logger.info('Flattened composite glyphs: %i' %
                            len(modified))
            return modified

    def filter(self, glyph):
        """Flatten glyph's components in place; return True if changed."""
        flattened = False
        if not glyph.components:
            return flattened
        pen = glyph.getPen()
        for comp in list(glyph.components):
            flattened_tuples = _flattenComponent(self.context.glyphSet, comp)
            # Identity result means the component had no nested components.
            if flattened_tuples[0] != (comp.baseGlyph, comp.transformation):
                flattened = True
                glyph.removeComponent(comp)
                for flattened_tuple in flattened_tuples:
                    pen.addComponent(*flattened_tuple)
        if flattened:
            self.context.modified.add(glyph.name)
        return flattened
def _flattenComponent(glyphSet, component):
    """Returns a list of tuples (baseGlyph, transform) of nested component."""
    base = glyphSet[component.baseGlyph]
    if not base.components:
        # Leaf glyph: the component maps directly onto it.
        return [(component.baseGlyph, Transform(*component.transformation))]
    result = []
    for nested in base.components:
        # Recurse, then compose this component's transform onto each leaf.
        for name, transform in _flattenComponent(glyphSet, nested):
            result.append((name, transform.transform(component.transformation)))
    return result
| mit | 736,537,670,217,299,000 | 34.865385 | 78 | 0.647185 | false | 4.6625 | false | false | false |
AnhellO/DAS_Sistemas | Ago-Dic-2018/Ruben Campos/Practica 2/user.py | 1 | 1274 | class User():
def __init__(self, first_name, last_name, phone, email, twitter):
self.first_name = first_name
self.last_name = last_name
self.phone = phone
self.email = email
self.twitter = twitter
def describe_user(self):
print("The user first name is: {} \nThe user last name is: {} \nThe user phone is: {} \nThe user email is: {} \nThe user Twitter is: {}".format(self.first_name,self.last_name,self.phone,self.email,self.twitter))
def greet_user(self):
print("Hey", self.first_name, "have a nice day!")
# Demo: build four users and print a description plus greeting for each.
user_1 = User("Jonathan", "Castillo", 5559864, "jonatillo@gmail.com", "@Jonatillo")
user_2 = User("Terry", "Flores", 5552148, "Teero1@gmail.com", "@Ter_ser")
user_3 = User("Mary", "Adams", 5559794, "maryni@gmail.com", "@mar_y")
user_4 = User("Hugo", "Jacobo", 5556444, "HugeJA@gmail.com", "@Hugo_tarugo")

list = [user_1, user_2, user_3, user_4]

for i in list:
    i.describe_user()
    i.greet_user()
    print("")

# Alternative iteration styles kept for reference:
"""
for i in range(len(list)):
    list[i].describe_user()
    list[i].greet_user()
    print("")
"""
"""
user_1.describe_user()
user_1.greet_user()

user_2.describe_user()
user_2.greet_user()

user_3.describe_user()
user_3.greet_user()

user_4.describe_user()
user_4.greet_user()
"""
| mit | 8,742,256,785,471,636,000 | 28.627907 | 219 | 0.624804 | false | 2.728051 | false | false | false |
beiko-lab/gengis | bin/Lib/site-packages/wx-2.8-msw-unicode/wx/glcanvas.py | 2 | 5669 | # This file was created automatically by SWIG 1.3.29.
# Don't modify this file, modify the SWIG interface instead.
"""
`GLCanvas` provides an OpenGL Context on a `wx.Window`.
"""
import _glcanvas
import new
new_instancemethod = new.instancemethod
# SWIG 1.3.29 boilerplate: attribute dispatch helpers shared by all proxy
# classes in this module.  Machine-generated; do not edit by hand.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
    if (name == "thisown"): return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'PySwigObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name,None)
    if method: return method(self,value)
    if (not static) or hasattr(self,name):
        self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)

def _swig_setattr(self,class_type,name,value):
    return _swig_setattr_nondynamic(self,class_type,name,value,0)

def _swig_getattr(self,class_type,name):
    if (name == "thisown"): return self.this.own()
    method = class_type.__swig_getmethods__.get(name,None)
    if method: return method(self)
    raise AttributeError,name

def _swig_repr(self):
    try: strthis = "proxy of " + self.this.__repr__()
    except: strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)

import types
try:
    _object = types.ObjectType
    _newclass = 1
except AttributeError:
    # Pre-2.2 Python fallback: no new-style classes available.
    class _object : pass
    _newclass = 0
del types

def _swig_setattr_nondynamic_method(set):
    def set_attr(self,name,value):
        if (name == "thisown"): return self.this.own(value)
        if hasattr(self,name) or (name == "this"):
            set(self,name,value)
        else:
            raise AttributeError("You cannot add attributes to %s" % self)
    return set_attr
import _core
wx = _core
__docfilter__ = wx.__DocFilter(globals())
# SWIG proxy for the C++ wxGLContext class.  Machine-generated.
class GLContext(_core.Object):
    """Proxy of C++ GLContext class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """__init__(self, GLCanvas win, GLContext other=None) -> GLContext"""
        _glcanvas.GLContext_swiginit(self,_glcanvas.new_GLContext(*args, **kwargs))
    __swig_destroy__ = _glcanvas.delete_GLContext
    __del__ = lambda self : None;
    def SetCurrent(*args, **kwargs):
        """SetCurrent(self, GLCanvas win)"""
        return _glcanvas.GLContext_SetCurrent(*args, **kwargs)
# Register the proxy class with the SWIG runtime.
_glcanvas.GLContext_swigregister(GLContext)
cvar = _glcanvas.cvar
GLCanvasNameStr = cvar.GLCanvasNameStr
WX_GL_RGBA = _glcanvas.WX_GL_RGBA
WX_GL_BUFFER_SIZE = _glcanvas.WX_GL_BUFFER_SIZE
WX_GL_LEVEL = _glcanvas.WX_GL_LEVEL
WX_GL_DOUBLEBUFFER = _glcanvas.WX_GL_DOUBLEBUFFER
WX_GL_STEREO = _glcanvas.WX_GL_STEREO
WX_GL_AUX_BUFFERS = _glcanvas.WX_GL_AUX_BUFFERS
WX_GL_MIN_RED = _glcanvas.WX_GL_MIN_RED
WX_GL_MIN_GREEN = _glcanvas.WX_GL_MIN_GREEN
WX_GL_MIN_BLUE = _glcanvas.WX_GL_MIN_BLUE
WX_GL_MIN_ALPHA = _glcanvas.WX_GL_MIN_ALPHA
WX_GL_DEPTH_SIZE = _glcanvas.WX_GL_DEPTH_SIZE
WX_GL_STENCIL_SIZE = _glcanvas.WX_GL_STENCIL_SIZE
WX_GL_MIN_ACCUM_RED = _glcanvas.WX_GL_MIN_ACCUM_RED
WX_GL_MIN_ACCUM_GREEN = _glcanvas.WX_GL_MIN_ACCUM_GREEN
WX_GL_MIN_ACCUM_BLUE = _glcanvas.WX_GL_MIN_ACCUM_BLUE
WX_GL_MIN_ACCUM_ALPHA = _glcanvas.WX_GL_MIN_ACCUM_ALPHA
# SWIG proxy for the C++ wxGLCanvas class (an OpenGL-capable wx.Window).
# Machine-generated.
class GLCanvas(_core.Window):
    """Proxy of C++ GLCanvas class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """
        __init__(self, Window parent, int id=-1, Point pos=DefaultPosition,
            Size size=DefaultSize, long style=0, String name=GLCanvasNameStr,
            int attribList=None, Palette palette=wxNullPalette) -> GLCanvas
        """
        _glcanvas.GLCanvas_swiginit(self,_glcanvas.new_GLCanvas(*args, **kwargs))
        self._setOORInfo(self)

    def SetCurrent(*args):
        """
        SetCurrent(self)
        SetCurrent(self, GLContext RC)
        """
        return _glcanvas.GLCanvas_SetCurrent(*args)

    def SetColour(*args, **kwargs):
        """SetColour(self, String colour)"""
        return _glcanvas.GLCanvas_SetColour(*args, **kwargs)

    def SwapBuffers(*args, **kwargs):
        """SwapBuffers(self)"""
        return _glcanvas.GLCanvas_SwapBuffers(*args, **kwargs)

    def GetContext(*args, **kwargs):
        """GetContext(self) -> GLContext"""
        return _glcanvas.GLCanvas_GetContext(*args, **kwargs)

    def SetupPixelFormat(*args, **kwargs):
        """SetupPixelFormat(self, int attribList=None)"""
        return _glcanvas.GLCanvas_SetupPixelFormat(*args, **kwargs)

    def SetupPalette(*args, **kwargs):
        """SetupPalette(self, Palette palette)"""
        return _glcanvas.GLCanvas_SetupPalette(*args, **kwargs)

    def CreateDefaultPalette(*args, **kwargs):
        """CreateDefaultPalette(self) -> Palette"""
        return _glcanvas.GLCanvas_CreateDefaultPalette(*args, **kwargs)

    def GetPalette(*args, **kwargs):
        """GetPalette(self) -> Palette"""
        return _glcanvas.GLCanvas_GetPalette(*args, **kwargs)

    Context = property(GetContext,doc="See `GetContext`")
_glcanvas.GLCanvas_swigregister(GLCanvas)
# Factory wrapper that creates a GLCanvas sharing an existing GLContext.
def GLCanvasWithContext(*args, **kwargs):
    """
    GLCanvasWithContext(Window parent, GLContext shared=None, int id=-1, Point pos=DefaultPosition,
        Size size=DefaultSize,
        long style=0, String name=GLCanvasNameStr,
        int attribList=None, Palette palette=wxNullPalette) -> GLCanvas
    """
    val = _glcanvas.new_GLCanvasWithContext(*args, **kwargs)
    val._setOORInfo(val)
    return val
| gpl-3.0 | -3,196,066,498,643,484,700 | 35.10828 | 101 | 0.653025 | false | 3.286377 | false | false | false |
Fanris/PySched | PySched/PySchedServer/Scheduler/Compiler/__init__.py | 1 | 1848 | # -*- coding: utf-8 -*-
'''
Created on 2013-01-04 12:10
@summary:
@author: Martin Predki
'''
from twisted.internet import reactor
from CompilerProcessProtocol import CompilerProcessProtocol
import logging
import os
class Compiler(object):
    '''
    @summary: Class containing the compile logic for jobs.  Compilation
    runs asynchronously in a twisted child process; results are reported
    back through the scheduler callbacks below.
    '''

    def __init__(self, scheduler):
        '''
        @summary: Initializes the compiler.
        @param scheduler: A reference to the scheduler
        @result:
        '''
        self.scheduler = scheduler
        self.logger = logging.getLogger("PySchedServer")

    def compileJob(self, job):
        '''
        @summary: Compiles a job.  job.compilerStr holds the command
        template; the process runs inside the job's working directory.
        @param job:
        @result: True once the compiler process has been spawned
        '''
        # Setting up the compile process parameter
        # ==============================
        jobPath = os.path.join(self.scheduler.workingDir, str(job.jobId))

        # parse command Template
        template = job.compilerStr.split(" ")

        # Start the compiler
        # ==============================
        self.logger.debug("Spawn process: {}".format(template))
        # CompilerProcessProtocol calls back into the scheduler when the
        # process finishes.
        reactor.spawnProcess(CompilerProcessProtocol(job, jobPath, self.scheduler), executable=template[0],
            args=template, path=jobPath, env=os.environ)

        # write a log file
        # ==============================
        self.logger.info("Compile process for job {} started.".format(job.jobId))
        return True

    def compilingCompleted(self, job):
        '''
        @summary: Is called when a job is compiled successful.
        @param job:
        @result:
        '''
        self.scheduler.compilingComplete(job)

    def compilingFailed(self, job):
        '''
        @summary: Is called when a job could not be compiled
        @param job:
        @result:
        '''
        self.scheduler.compilingFailed(job)
| lgpl-3.0 | 3,638,450,454,956,575,000 | 24.666667 | 107 | 0.568723 | false | 4.431655 | false | false | false |
vhbit/rust-docset | builder.py | 1 | 1230 | from docset.index import Index
from docset import rules
import os
import shutil
def build_docset(info, ds_rules, src_dir, out_dir):
    """Assemble a Dash-style docset under out_dir from the files in src_dir.

    info must provide 'name' and 'plist' (and optionally 'icon');
    ds_rules are applied to every file found under src_dir.
    """
    docset_dir = os.path.join(out_dir, info['name'] + '.docset')
    contents_dir = os.path.join(docset_dir, 'Contents')
    resources_dir = os.path.join(contents_dir, 'Resources')
    documents_dir = os.path.join(resources_dir, 'Documents')
    index_path = os.path.join(resources_dir, 'docSet.dsidx')

    # Creating the deepest directory also creates the whole skeleton.
    if not os.path.exists(documents_dir):
        os.makedirs(documents_dir)

    with open(os.path.join(contents_dir, "Info.plist"), "w+t") as f:
        f.write(info['plist'])

    if 'icon' in info and os.path.exists(info['icon']):
        shutil.copy2(info['icon'], docset_dir)

    idx = Index(index_path)

    # Apply the rules to every file below src_dir, preserving relative paths.
    for current_root, _dirnames, filenames in os.walk(src_dir):
        for filename in filenames:
            src_path = os.path.join(current_root, filename)
            rel_path = os.path.relpath(src_path, src_dir)
            ctx = {
                'src_path': src_path,
                'dest_path': os.path.join(documents_dir, rel_path),
                'rel_path': rel_path,
                'idx': idx,
            }
            rules.process_file_rules(ds_rules, ctx)

    idx.flush()
| mit | -32,142,809,591,768,190 | 29.75 | 67 | 0.58374 | false | 3.253968 | false | false | false |
chengdh/openerp-ktv | openerp/addons/auction/report/buyer_form_report.py | 9 | 2671 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from report import report_sxw
class buyer_form_report(report_sxw.rml_parse):
    """RML parser exposing tax, grouping and total helpers to the buyer
    form report template."""
    # Class-level counters referenced by the RML template.
    count=0
    c=0
    def __init__(self, cr, uid, name, context):
        super(buyer_form_report, self).__init__(cr, uid, name, context=context)
        self.localcontext.update({
            'time': time,
            'sum_taxes': self.sum_taxes,
            'buyerinfo' : self.buyer_info,
            'grand_total' : self.grand_buyer_total,
        })
    def sum_taxes(self, lot):
        """Total buyer-side taxes for one lot: the author right (if any)
        plus the auction's buyer costs, computed on the object price."""
        amount=0.0
        taxes=[]
        if lot.author_right:
            taxes.append(lot.author_right)
        if lot.auction_id:
            taxes += lot.auction_id.buyer_costs
        tax=self.pool.get('account.tax').compute_all(self.cr, self.uid, taxes, lot.obj_price, 1)
        for t in tax:
            amount+=t['amount']
        return amount
    def buyer_info(self):
        """Group the report's auction lots by buyer (ach_uid)."""
        objects = [object for object in self.localcontext.get('objects')]
        ret_dict = {}
        for object in objects:
            partner = ret_dict.get(object.ach_uid.id,False)
            if not partner:
                ret_dict[object.ach_uid.id] = {'partner' : object.ach_uid or False, 'lots':[object]}
            else:
                lots = partner.get('lots')
                lots.append(object)
        return ret_dict.values()
    def grand_buyer_total(self,o):
        """Sum of object price plus taxes over all of a buyer's lots."""
        grand_total = 0
        for oo in o:
            grand_total =grand_total + oo['obj_price'] +self.sum_taxes(oo)
        return grand_total
# Register the parser under the report service name referenced by the template.
report_sxw.report_sxw('report.buyer_form_report', 'auction.lots', 'addons/auction/report/buyer_form_report.rml', parser=buyer_form_report)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -4,328,688,751,632,081,000 | 37.710145 | 138 | 0.588544 | false | 3.826648 | false | false | false |
nmittler/grpc | src/python/src/grpc/framework/base/packets/_ingestion.py | 5 | 16408 | # Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""State and behavior for ingestion during an operation."""
import abc
import collections
from grpc.framework.base import exceptions
from grpc.framework.base import interfaces
from grpc.framework.base.packets import _constants
from grpc.framework.base.packets import _interfaces
from grpc.framework.base.packets import packets
from grpc.framework.foundation import abandonment
from grpc.framework.foundation import callable_util
from grpc.framework.foundation import stream
# Log messages emitted when customer-supplied ingestion code raises.
_CREATE_CONSUMER_EXCEPTION_LOG_MESSAGE = 'Exception initializing ingestion!'
_CONSUME_EXCEPTION_LOG_MESSAGE = 'Exception during ingestion!'
class _ConsumerCreation(collections.namedtuple(
    '_ConsumerCreation', ('consumer', 'remote_error', 'abandoned'))):
  """A sum type for the outcome of ingestion initialization.

  Exactly one case holds: either consumer is non-None, remote_error is True,
  or abandoned is True.

  Attributes:
    consumer: A stream.Consumer for ingesting payloads.
    remote_error: A boolean indicating that the consumer could not be created
      due to an error on the remote side of the operation.
    abandoned: A boolean indicating that the consumer creation was abandoned.
  """
class _EmptyConsumer(stream.Consumer):
  """A stream.Consumer that silently discards every payload and call."""

  def consume(self, value):
    """See stream.Consumer.consume for specification."""

  def consume_and_terminate(self, value):
    """See stream.Consumer.consume_and_terminate for specification."""

  def terminate(self):
    """See stream.Consumer.terminate for specification."""
class _ConsumerCreator(object):
  """Common specification of different consumer-creating behavior."""
  # Python 2 style metaclass declaration (this codebase predates six helpers).
  __metaclass__ = abc.ABCMeta

  @abc.abstractmethod
  def create_consumer(self, requirement):
    """Creates the stream.Consumer to which customer payloads will be delivered.

    Any exceptions raised by this method should be attributed to and treated as
    defects in the serviced or servicer code called by this method.

    Args:
      requirement: A value required by this _ConsumerCreator for consumer
        creation.

    Returns:
      A _ConsumerCreation describing the result of consumer creation.
    """
    raise NotImplementedError()
class _FrontConsumerCreator(_ConsumerCreator):
  """A _ConsumerCreator appropriate for front-side use."""

  def __init__(self, subscription, operation_context):
    """Constructor.

    Args:
      subscription: The serviced's interfaces.ServicedSubscription for the
        operation.
      operation_context: The interfaces.OperationContext object for the
        operation.
    """
    self._subscription = subscription
    self._operation_context = operation_context

  def create_consumer(self, requirement):
    """See _ConsumerCreator.create_consumer for specification."""
    # Serviced customers without a FULL subscription don't want results:
    # hand them a do-nothing consumer.
    if self._subscription.kind is not interfaces.ServicedSubscription.Kind.FULL:
      return _ConsumerCreation(_EmptyConsumer(), False, False)
    try:
      consumer = self._subscription.ingestor.consumer(self._operation_context)
    except abandonment.Abandoned:
      return _ConsumerCreation(None, False, True)
    else:
      return _ConsumerCreation(consumer, False, False)
class _BackConsumerCreator(_ConsumerCreator):
  """A _ConsumerCreator appropriate for back-side use."""

  def __init__(self, servicer, operation_context, emission_consumer):
    """Constructor.

    Args:
      servicer: The interfaces.Servicer that will service the operation.
      operation_context: The interfaces.OperationContext object for the
        operation.
      emission_consumer: The stream.Consumer object to which payloads emitted
        from the operation will be passed.
    """
    self._servicer = servicer
    self._operation_context = operation_context
    self._emission_consumer = emission_consumer

  def create_consumer(self, requirement):
    """See _ConsumerCreator.create_consumer for full specification.

    Args:
      requirement: The name of the Servicer method to be called during this
        operation.

    Returns:
      A _ConsumerCreation describing the result of consumer creation.
    """
    try:
      consumer = self._servicer.service(
          requirement, self._operation_context, self._emission_consumer)
    except exceptions.NoSuchMethodError:
      # The remote side asked for a method this servicer does not have.
      return _ConsumerCreation(None, True, False)
    except abandonment.Abandoned:
      return _ConsumerCreation(None, False, True)
    else:
      return _ConsumerCreation(consumer, False, False)
class _WrappedConsumer(object):
"""Wraps a consumer to catch the exceptions that it is allowed to throw."""
def __init__(self, consumer):
"""Constructor.
Args:
consumer: A stream.Consumer that may raise abandonment.Abandoned from any
of its methods.
"""
self._consumer = consumer
def moar(self, payload, complete):
"""Makes progress with the wrapped consumer.
This method catches all exceptions allowed to be thrown by the wrapped
consumer. Any exceptions raised by this method should be blamed on the
customer-supplied consumer.
Args:
payload: A customer-significant payload object. May be None only if
complete is True.
complete: Whether or not the end of the payload sequence has been reached.
Must be True if payload is None.
Returns:
True if the wrapped consumer made progress or False if the wrapped
consumer raised abandonment.Abandoned to indicate its abandonment of
progress.
"""
try:
if payload is None:
self._consumer.terminate()
elif complete:
self._consumer.consume_and_terminate(payload)
else:
self._consumer.consume(payload)
return True
except abandonment.Abandoned:
return False
class _IngestionManager(_interfaces.IngestionManager):
  """An implementation of _interfaces.IngestionManager.

  State notes: self._pending_ingestion doubles as the alive/aborted flag
  (a list while ingestion may still proceed, None once ingestion finished or
  aborted) and self._processing records whether a pool thread is currently
  driving customer code. All state is guarded by self._lock; customer code is
  only ever invoked with the lock released.
  """

  def __init__(
      self, lock, pool, consumer_creator, failure_kind, termination_manager,
      transmission_manager):
    """Constructor.

    Args:
      lock: The operation-wide lock.
      pool: A thread pool in which to execute customer code.
      consumer_creator: A _ConsumerCreator wrapping the portion of customer
        code that when called returns the stream.Consumer with which the
        customer code will ingest payload values.
      failure_kind: Whichever one of packets.Kind.SERVICED_FAILURE or
        packets.Kind.SERVICER_FAILURE describes local failure of customer
        code.
      termination_manager: The _interfaces.TerminationManager for the
        operation.
      transmission_manager: The _interfaces.TransmissionManager for the
        operation.
    """
    self._lock = lock
    self._pool = pool
    self._consumer_creator = consumer_creator
    self._failure_kind = failure_kind
    self._termination_manager = termination_manager
    self._transmission_manager = transmission_manager
    # Injected later via set_expiration_manager (circular construction order).
    self._expiration_manager = None

    self._wrapped_ingestion_consumer = None
    self._pending_ingestion = []
    self._ingestion_complete = False
    self._processing = False

  def set_expiration_manager(self, expiration_manager):
    # Late injection; must happen before any abort path can run.
    self._expiration_manager = expiration_manager

  def _abort_internal_only(self):
    # Drop references; a None _pending_ingestion marks ingestion as over.
    self._wrapped_ingestion_consumer = None
    self._pending_ingestion = None

  def _abort_and_notify(self, outcome):
    # Abort locally, then fan the outcome out to the sibling managers.
    self._abort_internal_only()
    self._termination_manager.abort(outcome)
    self._transmission_manager.abort(outcome)
    self._expiration_manager.abort()

  def _next(self):
    """Computes the next step for ingestion.

    Callers in this class hold self._lock when invoking this method.

    Returns:
      A payload, complete, continue triplet indicating what payload (if any)
        is available to feed into customer code, whether or not the sequence
        of payloads has terminated, and whether or not there is anything
        immediately actionable to call customer code to do.
    """
    if self._pending_ingestion is None:
      # Ingestion already finished or aborted: nothing to do.
      return None, False, False
    elif self._pending_ingestion:
      payload = self._pending_ingestion.pop(0)
      complete = self._ingestion_complete and not self._pending_ingestion
      return payload, complete, True
    elif self._ingestion_complete:
      # No payloads queued but the termination still needs delivering.
      return None, True, True
    else:
      # Queue empty and not yet terminated: wait for more input.
      return None, False, False

  def _process(self, wrapped_ingestion_consumer, payload, complete):
    """A method to call to execute customer code.

    This object's lock must *not* be held when calling this method.

    Args:
      wrapped_ingestion_consumer: The _WrappedConsumer with which to pass
        payloads to customer code.
      payload: A customer payload. May be None only if complete is True.
      complete: Whether or not the sequence of payloads to pass to the
        customer has concluded.
    """
    while True:
      consumption_outcome = callable_util.call_logging_exceptions(
          wrapped_ingestion_consumer.moar, _CONSUME_EXCEPTION_LOG_MESSAGE,
          payload, complete)
      if consumption_outcome.exception is None:
        if consumption_outcome.return_value:
          with self._lock:
            if complete:
              self._pending_ingestion = None
              self._termination_manager.ingestion_complete()
              return
            else:
              payload, complete, moar = self._next()
              if not moar:
                self._processing = False
                return
        else:
          # Customer consumer abandoned ingestion.
          with self._lock:
            if self._pending_ingestion is not None:
              self._abort_and_notify(self._failure_kind)
            self._processing = False
            return
      else:
        # Customer consumer raised: treat as local failure.
        with self._lock:
          self._abort_and_notify(self._failure_kind)
          self._processing = False
          return

  def start(self, requirement):
    if self._pending_ingestion is not None:
      def initialize():
        consumer_creation_outcome = callable_util.call_logging_exceptions(
            self._consumer_creator.create_consumer,
            _CREATE_CONSUMER_EXCEPTION_LOG_MESSAGE, requirement)
        if consumer_creation_outcome.return_value is None:
          # Customer creation code itself raised.
          with self._lock:
            self._abort_and_notify(self._failure_kind)
            self._processing = False
        elif consumer_creation_outcome.return_value.remote_error:
          with self._lock:
            self._abort_and_notify(packets.Kind.RECEPTION_FAILURE)
            self._processing = False
        elif consumer_creation_outcome.return_value.abandoned:
          with self._lock:
            if self._pending_ingestion is not None:
              self._abort_and_notify(self._failure_kind)
            self._processing = False
        else:
          wrapped_ingestion_consumer = _WrappedConsumer(
              consumer_creation_outcome.return_value.consumer)
          with self._lock:
            self._wrapped_ingestion_consumer = wrapped_ingestion_consumer
            payload, complete, moar = self._next()
            if not moar:
              self._processing = False
              return

          # Lock released: now drive the customer consumer.
          self._process(wrapped_ingestion_consumer, payload, complete)

      self._pool.submit(
          callable_util.with_exceptions_logged(
              initialize, _constants.INTERNAL_ERROR_LOG_MESSAGE))
      self._processing = True

  def consume(self, payload):
    if self._ingestion_complete:
      # A payload arriving after termination is a local protocol violation.
      self._abort_and_notify(self._failure_kind)
    elif self._pending_ingestion is not None:
      if self._processing:
        # A pool thread is already draining the queue; just enqueue.
        self._pending_ingestion.append(payload)
      else:
        self._pool.submit(
            callable_util.with_exceptions_logged(
                self._process, _constants.INTERNAL_ERROR_LOG_MESSAGE),
            self._wrapped_ingestion_consumer, payload, False)
        self._processing = True

  def terminate(self):
    if self._ingestion_complete:
      # Double termination is a local protocol violation.
      self._abort_and_notify(self._failure_kind)
    else:
      self._ingestion_complete = True
      if self._pending_ingestion is not None and not self._processing:
        self._pool.submit(
            callable_util.with_exceptions_logged(
                self._process, _constants.INTERNAL_ERROR_LOG_MESSAGE),
            self._wrapped_ingestion_consumer, None, True)
        self._processing = True

  def consume_and_terminate(self, payload):
    if self._ingestion_complete:
      self._abort_and_notify(self._failure_kind)
    else:
      self._ingestion_complete = True
      if self._pending_ingestion is not None:
        if self._processing:
          self._pending_ingestion.append(payload)
        else:
          self._pool.submit(
              callable_util.with_exceptions_logged(
                  self._process, _constants.INTERNAL_ERROR_LOG_MESSAGE),
              self._wrapped_ingestion_consumer, payload, True)
          self._processing = True

  def abort(self):
    """See _interfaces.IngestionManager.abort for specification."""
    self._abort_internal_only()
def front_ingestion_manager(
    lock, pool, subscription, termination_manager, transmission_manager,
    operation_context):
  """Creates an IngestionManager appropriate for front-side use.

  Args:
    lock: The operation-wide lock.
    pool: A thread pool in which to execute customer code.
    subscription: A base_interfaces.ServicedSubscription indicating the
      customer's interest in the results of the operation.
    termination_manager: The _interfaces.TerminationManager for the operation.
    transmission_manager: The _interfaces.TransmissionManager for the
      operation.
    operation_context: A base_interfaces.OperationContext for the operation.

  Returns:
    An IngestionManager appropriate for front-side use.
  """
  consumer_creator = _FrontConsumerCreator(subscription, operation_context)
  ingestion_manager = _IngestionManager(
      lock, pool, consumer_creator, packets.Kind.SERVICED_FAILURE,
      termination_manager, transmission_manager)
  # Front-side ingestion needs no requirement value to begin.
  ingestion_manager.start(None)
  return ingestion_manager
def back_ingestion_manager(
    lock, pool, servicer, termination_manager, transmission_manager,
    operation_context, emission_consumer):
  """Creates an IngestionManager appropriate for back-side use.

  Args:
    lock: The operation-wide lock.
    pool: A thread pool in which to execute customer code.
    servicer: A base_interfaces.Servicer for servicing the operation.
    termination_manager: The _interfaces.TerminationManager for the operation.
    transmission_manager: The _interfaces.TransmissionManager for the
      operation.
    operation_context: A base_interfaces.OperationContext for the operation.
    emission_consumer: The _interfaces.EmissionConsumer for the operation.

  Returns:
    An IngestionManager appropriate for back-side use.
  """
  consumer_creator = _BackConsumerCreator(
      servicer, operation_context, emission_consumer)
  # Unlike the front side, the caller starts this manager once the operation
  # name (the requirement) is known.
  return _IngestionManager(
      lock, pool, consumer_creator, packets.Kind.SERVICER_FAILURE,
      termination_manager, transmission_manager)
| bsd-3-clause | -7,925,192,769,846,485,000 | 36.375854 | 80 | 0.698501 | false | 4.389513 | false | false | false |
Merinorus/adaisawesome | Homework/05 - Taming Text/HW05_awesometeam_Q2.py | 1 | 8304 |
# coding: utf-8
# Question 2) Find all the mentions of world countries in the whole corpus,
# using the pycountry utility (HINT: remember that there will be different surface forms
# for the same country in the text, e.g., Switzerland, switzerland, CH, etc.)
# Perform sentiment analysis on every email message using the demo methods
# in the nltk.sentiment.util module. Aggregate the polarity information of all
# the emails by country, and plot a histogram (ordered and colored by polarity level)
# that summarizes the perception of the different countries. Repeat the aggregation and plotting steps using different demo methods from the sentiment analysis module.
# Can you find substantial differences?
# In[51]:
import pandas as pd
import pycountry
from nltk.sentiment import *
import numpy as np
import matplotlib.pyplot as plt
import codecs
import math
import re
import string
# Pre Process the Data, Dropping Irrelevant Columns

# In[204]:
emails = pd.read_csv("hillary-clinton-emails/Emails.csv")

# In[205]:
# Drop columns that won't be used.  ('DocNumber' is listed twice; pandas
# drops the label once and the duplicate entry is harmless.)
emails = emails.drop(['DocNumber', 'MetadataPdfLink','DocNumber', 'ExtractedDocNumber', 'MetadataCaseNumber'], axis=1)
emails.head()

# In[206]:
# Keep only the extracted message bodies.
emails_cut = emails[['ExtractedBodyText']].copy()
emails_cut.head()

# In[207]:
# Remove rows with no extracted body.
emails_cut = emails_cut.dropna()
emails_cut.head()
# Now we must tokenize the data...

# In[208]:
from nltk import word_tokenize
from nltk.tokenize import RegexpTokenizer
from nltk.corpus import stopwords
# Word-character tokenizer: splits the body text into alphanumeric tokens.
tokenizer = RegexpTokenizer(r'\w+')

# In[209]:
emails_tokenized = emails_cut.copy()
# Fixed: the previous version assigned into the Series yielded by iterrows(),
# which is a copy — writes to it are not guaranteed to propagate back into
# the DataFrame.  A column-wise apply() tokenizes reliably.
emails_tokenized['ExtractedBodyText'] = emails_tokenized['ExtractedBodyText'].apply(tokenizer.tokenize)
emails_tokenized.columns = ['TokenizedText']
emails_tokenized.reset_index(drop=True, inplace=True)
emails_tokenized.head()
# Figure out what words to remove...

# In[210]:
# Tokens that collide with ISO country codes or names in ordinary email text
# (stop words, abbreviations, initials) and must not count as mentions.
words_delete = ['IT', 'RE','LA','AND', 'AM', 'AT', 'IN', 'I', 'ME', 'DO',
                'A', 'AN','BUT', 'IF', 'OR','AS','OF','BY', 'TO', 'UP','ON','ANY', 'NO', 'NOR', 'NOT','SO',
                'S', 'T','DON','D', 'LL', 'M', 'O','VE', 'Y','PM', 'TV','CD','PA','ET', 'BY', 'IE','MS', 'MP', 'CC',
                'GA','VA', 'BI','CV', 'AL','VAT', 'VA','AI', 'MD', 'SM', 'FM', 'EST', 'BB', 'BRB', 'AQ', 'MA', 'MAR', 'JAM', 'BM',
                'Lybia', 'LY', 'LBY', 'MC', 'MCO', 'MO', 'MAC', 'NC', 'PG', 'PNG', 'SUR', 'VI', 'lybia', 'ARM']
emails_final = emails_tokenized.copy()
# Remove every blacklisted token from every email's token list.
emails_final['TokenizedText'] = emails_final['TokenizedText'].apply(lambda x: [item for item in x if item not in words_delete])
emails_final.head()
# Create list of countries

# In[211]:
# Scan every tokenized email and collect each recognised country mention,
# trying ISO alpha-2, then alpha-3, then the full English name.
countries_cited = []
# Renamed the loop variable (it used to shadow the `emails` DataFrame).
for email_tokens in emails_final['TokenizedText']:
    for word in email_tokens:
        try:
            country_name = pycountry.countries.get(alpha_2=word)
            countries_cited.append(country_name.name)
        except KeyError:
            try:
                country_name = pycountry.countries.get(alpha_3=word)
                countries_cited.append(country_name.name)
            except KeyError:
                try:
                    country = pycountry.countries.get(name=word)
                    # Fixed: this branch previously appended
                    # `country_name.name` — a stale value from an earlier
                    # iteration (or an UnboundLocalError on the first
                    # full-name match) — instead of the country just found.
                    countries_cited.append(country.name)
                except KeyError:
                    pass
# Organize List and Count Occurrence of Each Country

# In[212]:
# List with unique entries of countries cited.
final_countries = list(set(countries_cited))
size = len(final_countries)
final_countries

# In[213]:
# Create a new DataFrame to hold the per-country mention counts.
Country_Sent = pd.DataFrame(index=range(0,size),columns=['Country', 'Count'])
Country_Sent['Country']=final_countries
Country_Sent.head()

# In[214]:
# Count how many times each country appears in the raw mention list.
count_list = []
for country in Country_Sent['Country']:
    count = countries_cited.count(country)
    count_list.append(count)
Country_Sent['Count']=count_list
Country_Sent.head()

# In[215]:
# Keep only countries with at least 15 citations (Count > 14).
Country_Sent= Country_Sent[Country_Sent['Count'] > 14]
Country_Sent = Country_Sent.reset_index(drop=True)
Country_Sent.head()

# In[216]:
# Plot to see frequencies.
Country_Sent.plot.bar(x='Country', y='Count')
plt.show()
# We have repeatedly plotted this, identifying weird occurrences (small
# countries with high counts), and then eliminating them from the data set
# and repeating the process.
# In[217]:
# Build all possible surface forms (full name, alpha-2, alpha-3) for each
# remaining country, so mentions can be matched under any of them.
countries_used_name = []
countries_used_alpha_2 =[]
countries_used_alpha_3 =[]
for country in Country_Sent['Country']:
    country_names = pycountry.countries.get(name=country)
    countries_used_name.append(country_names.name)
    countries_used_alpha_2.append(country_names.alpha_2)
    countries_used_alpha_3.append(country_names.alpha_3)
Country_Sent['Alpha_2']=countries_used_alpha_2
Country_Sent['Alpha_3']=countries_used_alpha_3
Country_Sent.head()

# In[218]:
len(Country_Sent)
# Now we check sentiment on emails around these names

# In[170]:
sentiments = []
vader_analyzer = SentimentIntensityAnalyzer()
size = len(Country_Sent['Alpha_2'])
# NOTE(review): starting at 1 skips the first row; the cell below (In[220])
# compensates by dropping index 0, so the off-by-one is kept here on purpose.
for i in range(1,size):
    country_score =[]
    # Fixed: this previously iterated the undefined name `emails_no_stop`
    # (NameError); the tokenized, filtered frame built above is `emails_final`.
    for email in emails_final['TokenizedText']:
        if Country_Sent['Alpha_2'][i] in email or Country_Sent['Alpha_3'][i] in email or Country_Sent['Country'][i] in email:
            str_email = ' '.join(email)
            sentiment = vader_analyzer.polarity_scores(str_email)
            score = sentiment['compound']
            country_score.append(score)
        else: pass
    if len(country_score)!=0:
        sentiment_score = sum(country_score) / float(len(country_score))
        sentiments.append(sentiment_score)
    else:
        # 999 is a sentinel meaning "no email mentions this country"; such
        # rows are filtered out later (In[224]).
        sentiments.append(999)
# In[291]:
sentiments

# In[220]:
# Error in iteration above: the loop started at index 1, so row 0 never
# received a sentiment score and must be dropped to realign the frame.
Country_Sent = Country_Sent.drop(Country_Sent.index[[0]])
len(Country_Sent)

# In[222]:
# Add sentiment list to the data frame.
Country_Sent['Sentiment'] = sentiments
Country_Sent.head()

# In[224]:
# Delete any row with the 999 sentinel (country never mentioned).
Country_Sent = Country_Sent[Country_Sent['Sentiment'] != 999]
Country_Sent.head()

# In[226]:
# Reorder dataframe in ascending order of sentiment.
Country_Sent.sort_values(['Sentiment'], ascending=True, inplace=True)
Country_Sent.head()

# In[254]:
# Reorder index.
Country_Sent = Country_Sent.reset_index(drop=True)
Country_Sent.head()
# Now we make a color gradient for the histogram

# In[288]:
# Normalize the sentiment scores and build an RGB tuple per row: negative
# scores shade toward red, non-negative scores toward green.
# NOTE(review): `sentiments` still contains the 999 sentinels, so
# np.max(sentiments) is 999 here, which flattens the green channel; consider
# normalizing against Country_Sent['Sentiment'] instead — confirm intent.
color_grad = []
size = len(Country_Sent['Sentiment'])
for i in range(0,size):
    if Country_Sent['Sentiment'][i] < 0:
        high = 0
        low = np.min(sentiments)
        rg = low-high
        new_entry = (low-Country_Sent['Sentiment'][i])/rg
        red = 1 - new_entry
        color_grad.append((red,0,0))
    else:
        high = np.max(sentiments)
        low = 0
        rg2 = high-low
        new_entry = (Country_Sent['Sentiment'][i]-low)/rg2
        green = 1 - new_entry
        color_grad.append((0,green,0))
Country_Sent['color_grad'] = color_grad
Country_Sent.head()
# In[289]:
# Bar plot using the hand-built red/green palette above.
import seaborn as sns
plt.figure(figsize=(30,20))
plot = sns.barplot(x='Country', y='Sentiment', data=Country_Sent, orient='vertical', palette=color_grad)
plt.ylabel('Country Sentiment');
plt.show()

# In[252]:
# Bar plot with an automatically generated diverging palette.
# NOTE(review): n=32 is hard-coded — presumably the row count of Country_Sent
# at the time this was run; confirm it still matches len(Country_Sent).
size = len(Country_Sent['Sentiment'])
plt.figure(figsize=(30,20))
grad = sns.diverging_palette(10, 225, n=32)
plot = sns.barplot(x='Country', y='Sentiment', data=Country_Sent, orient='vertical', palette = grad )
plt.xticks(rotation=60);
plt.ylabel('Country Sentiment');
plt.show()
# Comment on Sentiment Data:
# Some countries were lost in this analysis; it is not yet clear why.
# Comments on Data Viz:
# Creating my own palette somehow erased the nuances between countries even when
# the difference in scores was significant. The automatically generated palette
# performed much better at conveying the info.
# In[ ]:
| gpl-3.0 | -6,314,052,537,993,812,000 | 25.530351 | 167 | 0.668353 | false | 3.261587 | false | false | false |
klingebj/regreg | doc/examples/svm.py | 4 | 1152 | import numpy as np
import regreg.api as rr
np.random.seed(400)
N = 500
P = 2
Y = 2 * np.random.binomial(1, 0.5, size=(N,)) - 1.
X = np.random.standard_normal((N,P))
X[Y==1] += np.array([3,-2])[np.newaxis,:]
X_1 = np.hstack([X, np.ones((N,1))])
X_1_signs = -Y[:,np.newaxis] * X_1
transform = rr.affine_transform(X_1_signs, np.ones(N))
C = 0.2
hinge = rr.positive_part(N, lagrange=C)
hinge_loss = rr.linear_atom(hinge, transform)
quadratic = rr.quadratic.linear(rr.selector(slice(0,P), (P+1,)), coef=0.5)
problem = rr.container(quadratic, hinge_loss)
solver = rr.FISTA(problem)
solver.fit()
import pylab
pylab.clf()
pylab.scatter(X[Y==1,0],X[Y==1,1], facecolor='red')
pylab.scatter(X[Y==-1,0],X[Y==-1,1], facecolor='blue')
fits = np.dot(X_1, problem.coefs)
labels = 2 * (fits > 0) - 1
pointX = [X[:,0].min(), X[:,0].max()]
pointY = [-(pointX[0]*problem.coefs[0]+problem.coefs[2])/problem.coefs[1],
-(pointX[1]*problem.coefs[0]+problem.coefs[2])/problem.coefs[1]]
pylab.plot(pointX, pointY, linestyle='--', label='Separating hyperplane')
pylab.title("Accuracy = %0.1f %%" % (100-100 * np.fabs(labels - Y).sum() / (2 * N)))
#pylab.show()
| bsd-3-clause | -3,753,613,559,502,926,000 | 29.315789 | 84 | 0.635417 | false | 2.39501 | false | true | false |
yxd-hde/lambda-poll-update-delete | py-tornado-botocore/delete.py | 2 | 1236 | from tornado import gen
from itertools import izip_longest
from functools import partial
from operator import is_not
class Delete(object):
    """Deletes SQS messages in batches of 10 via tornado-botocore coroutines.

    Tracks how many DeleteMessageBatch requests were issued and how many
    messages were confirmed deleted.
    """

    def __init__(self):
        self.delete_count = 0   # number of DeleteMessageBatch calls made
        self.message_count = 0  # number of messages confirmed deleted

    @gen.coroutine
    def _one_request(self, sqs_delete_message_batch, queue_url, ids):
        """Issue one DeleteMessageBatch call for up to 10 entries."""
        self.delete_count += 1
        resp = yield gen.Task(sqs_delete_message_batch.call,
                              QueueUrl=queue_url,
                              Entries=ids)
        if 'Successful' in resp:
            self.message_count += len(resp['Successful'])
        if 'Failed' in resp:
            # Any partial failure aborts the whole coroutine chain.
            raise Exception('failed to delete messages')

    @gen.coroutine
    def execute(self, sqs_delete_message_batch, queue_url, ids):
        """Delete all *ids* from *queue_url*, 10 per request, concurrently."""
        id_groups = group_by_10(ids)
        r = []
        for id_group in id_groups:
            r.append(self._one_request(sqs_delete_message_batch,
                                       queue_url, id_group))
        # Yielding the list waits for every in-flight request to finish.
        yield r
def group_by_10(ids):
    """Split *ids* into batches of at most 10 entries (the SQS batch limit).

    Args:
        ids: A sequence of message entries.

    Returns:
        A list of lists; every batch holds 10 items except possibly the last.
    """
    # Slice directly instead of the former izip_longest/pad/filter dance:
    # no Python-2-only import (izip_longest was removed in Python 3), no lazy
    # `map` object under Python 3, and no risk of silently dropping a
    # legitimate None entry that the old None-filter would have removed.
    return [list(ids[i:i + 10]) for i in range(0, len(ids), 10)]
| mit | 6,247,602,740,467,058,000 | 28.428571 | 69 | 0.575243 | false | 3.886792 | false | false | false |
looker/sentry | src/sentry/models/eventmapping.py | 2 | 1684 | """
sentry.models.groupmeta
~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from django.db import models
from django.utils import timezone
from sentry.db.models import (BoundedBigIntegerField, Model, sane_repr)
class EventMapping(Model):
    """Maps a raw event id to its aggregated group within a project.

    Rows are unique per (project_id, event_id). Group and project are stored
    as raw ids rather than ForeignKeys; the properties below provide
    ForeignKey-like access for backwards compatibility.
    """
    __core__ = False

    project_id = BoundedBigIntegerField()
    group_id = BoundedBigIntegerField()
    event_id = models.CharField(max_length=32)
    date_added = models.DateTimeField(default=timezone.now)

    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_eventmapping'
        unique_together = (('project_id', 'event_id'), )

    __repr__ = sane_repr('project_id', 'group_id', 'event_id')

    # Implement a ForeignKey-like accessor for backwards compat
    def _set_group(self, group):
        self.group_id = group.id
        self._group_cache = group

    def _get_group(self):
        # Local import avoids a circular dependency; the lookup is memoized
        # on the instance.
        from sentry.models import Group
        if not hasattr(self, '_group_cache'):
            self._group_cache = Group.objects.get(id=self.group_id)
        return self._group_cache

    group = property(_get_group, _set_group)

    # Implement a ForeignKey-like accessor for backwards compat
    def _set_project(self, project):
        self.project_id = project.id
        self._project_cache = project

    def _get_project(self):
        # Local import avoids a circular dependency; the lookup is memoized
        # on the instance.
        from sentry.models import Project
        if not hasattr(self, '_project_cache'):
            self._project_cache = Project.objects.get(id=self.project_id)
        return self._project_cache

    project = property(_get_project, _set_project)
| bsd-3-clause | -4,767,315,554,122,112,000 | 29.618182 | 75 | 0.6538 | false | 3.853547 | false | false | false |
deadsoft/Reklaminiai-Parduotuviu-Lankstinukai | gui.py | 1 | 71891 | # -*- coding: utf-8 -*-
"""
ReklaminiaiParduotuviųLankstinukai
Copyright (C) <2014> <Algirdas Butkus> <butkus.algirdas@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
# Application version number.
version = 0.011
from PyQt4 import QtCore, QtGui, QtWebKit
import os
def SEP(path):
    """Convert a '/'-separated path to this OS's native separator."""
    native = os.path.sep
    # On POSIX the separator already is '/', so the path passes through.
    return path if native == '/' else path.replace('/', native)
# User's home directory and the program's cache directory (icons, etc.).
userdir = os.path.expanduser('~')
userprogpath = SEP('/.cache/deadprogram/')
# Compatibility shims: some PyQt4/Qt builds provide QString.fromUtf8 and the
# UnicodeUTF8 translate encoding; fall back to equivalents when absent.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        return s

try:
    _encoding = QtGui.QApplication.UnicodeUTF8

    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(905, 636)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
MainWindow.setSizePolicy(sizePolicy)
MainWindow.setMinimumSize(QtCore.QSize(0, 600))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Sans"))
MainWindow.setFont(font)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(userdir + userprogpath + SEP("icons/image.png"))), QtGui.QIcon.Normal, QtGui.QIcon.Off)
MainWindow.setWindowIcon(icon)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.verticalLayout_2 = QtGui.QVBoxLayout(self.centralwidget)
self.verticalLayout_2.setSpacing(6)
self.verticalLayout_2.setMargin(6)
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.gridLayout = QtGui.QGridLayout()
self.gridLayout.setSizeConstraint(QtGui.QLayout.SetMinAndMaxSize)
self.gridLayout.setSpacing(0)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setSpacing(0)
self.horizontalLayout.setSizeConstraint(QtGui.QLayout.SetNoConstraint)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.tabWidget = QtGui.QTabWidget(self.centralwidget)
self.tabWidget.setMinimumSize(QtCore.QSize(0, 27))
self.tabWidget.setBaseSize(QtCore.QSize(0, 0))
self.tabWidget.setFocusPolicy(QtCore.Qt.NoFocus)
self.tabWidget.setTabShape(QtGui.QTabWidget.Rounded)
self.tabWidget.setDocumentMode(False)
self.tabWidget.setMovable(False)
self.tabWidget.setObjectName(_fromUtf8("tabWidget"))
self.pdftab = QtGui.QWidget()
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pdftab.sizePolicy().hasHeightForWidth())
self.pdftab.setSizePolicy(sizePolicy)
self.pdftab.setObjectName(_fromUtf8("pdftab"))
self.verticalLayout_4 = QtGui.QVBoxLayout(self.pdftab)
self.verticalLayout_4.setSpacing(0)
self.verticalLayout_4.setMargin(0)
self.verticalLayout_4.setObjectName(_fromUtf8("verticalLayout_4"))
self.horizontalLayout_5 = QtGui.QHBoxLayout()
self.horizontalLayout_5.setSpacing(1)
self.horizontalLayout_5.setContentsMargins(-1, 2, -1, 1)
self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5"))
self.comboBox_2 = QtGui.QComboBox(self.pdftab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.comboBox_2.sizePolicy().hasHeightForWidth())
self.comboBox_2.setSizePolicy(sizePolicy)
self.comboBox_2.setMinimumSize(QtCore.QSize(100, 0))
self.comboBox_2.setMaximumSize(QtCore.QSize(100, 23))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Sans Serif"))
self.comboBox_2.setFont(font)
self.comboBox_2.setFocusPolicy(QtCore.Qt.StrongFocus)
self.comboBox_2.setObjectName(_fromUtf8("comboBox_2"))
self.comboBox_2.addItem(_fromUtf8(""))
self.horizontalLayout_5.addWidget(self.comboBox_2)
self.comboBox_3 = QtGui.QComboBox(self.pdftab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.comboBox_3.sizePolicy().hasHeightForWidth())
self.comboBox_3.setSizePolicy(sizePolicy)
self.comboBox_3.setMinimumSize(QtCore.QSize(100, 0))
self.comboBox_3.setMaximumSize(QtCore.QSize(100, 23))
self.comboBox_3.setFocusPolicy(QtCore.Qt.StrongFocus)
self.comboBox_3.setObjectName(_fromUtf8("comboBox_3"))
self.comboBox_3.addItem(_fromUtf8(""))
self.horizontalLayout_5.addWidget(self.comboBox_3)
self.comboBox_4 = QtGui.QComboBox(self.pdftab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.comboBox_4.sizePolicy().hasHeightForWidth())
self.comboBox_4.setSizePolicy(sizePolicy)
self.comboBox_4.setMinimumSize(QtCore.QSize(100, 0))
self.comboBox_4.setMaximumSize(QtCore.QSize(100, 23))
self.comboBox_4.setFocusPolicy(QtCore.Qt.StrongFocus)
self.comboBox_4.setObjectName(_fromUtf8("comboBox_4"))
self.comboBox_4.addItem(_fromUtf8(""))
self.horizontalLayout_5.addWidget(self.comboBox_4)
self.comboBox_6 = QtGui.QComboBox(self.pdftab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.comboBox_6.sizePolicy().hasHeightForWidth())
self.comboBox_6.setSizePolicy(sizePolicy)
self.comboBox_6.setMinimumSize(QtCore.QSize(100, 0))
self.comboBox_6.setMaximumSize(QtCore.QSize(100, 23))
self.comboBox_6.setFocusPolicy(QtCore.Qt.StrongFocus)
self.comboBox_6.setObjectName(_fromUtf8("comboBox_6"))
self.comboBox_6.addItem(_fromUtf8(""))
self.horizontalLayout_5.addWidget(self.comboBox_6)
self.comboBox_5 = QtGui.QComboBox(self.pdftab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.comboBox_5.sizePolicy().hasHeightForWidth())
self.comboBox_5.setSizePolicy(sizePolicy)
self.comboBox_5.setMinimumSize(QtCore.QSize(100, 0))
self.comboBox_5.setMaximumSize(QtCore.QSize(100, 23))
self.comboBox_5.setFocusPolicy(QtCore.Qt.StrongFocus)
self.comboBox_5.setObjectName(_fromUtf8("comboBox_5"))
self.comboBox_5.addItem(_fromUtf8(""))
self.horizontalLayout_5.addWidget(self.comboBox_5)
self.comboBox_7 = QtGui.QComboBox(self.pdftab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.comboBox_7.sizePolicy().hasHeightForWidth())
self.comboBox_7.setSizePolicy(sizePolicy)
self.comboBox_7.setMinimumSize(QtCore.QSize(130, 0))
self.comboBox_7.setMaximumSize(QtCore.QSize(100, 23))
self.comboBox_7.setFocusPolicy(QtCore.Qt.StrongFocus)
self.comboBox_7.setObjectName(_fromUtf8("comboBox_7"))
self.comboBox_7.addItem(_fromUtf8(""))
self.horizontalLayout_5.addWidget(self.comboBox_7)
self.comboBox_10 = QtGui.QComboBox(self.pdftab)
self.comboBox_10.setMinimumSize(QtCore.QSize(145, 0))
self.comboBox_10.setMaximumSize(QtCore.QSize(16777215, 23))
self.comboBox_10.setFocusPolicy(QtCore.Qt.StrongFocus)
self.comboBox_10.setObjectName(_fromUtf8("comboBox_10"))
self.comboBox_10.addItem(_fromUtf8(""))
self.horizontalLayout_5.addWidget(self.comboBox_10)
self.comboBox_11 = QtGui.QComboBox(self.pdftab)
self.comboBox_11.setMinimumSize(QtCore.QSize(100, 0))
self.comboBox_11.setMaximumSize(QtCore.QSize(100, 23))
self.comboBox_11.setFocusPolicy(QtCore.Qt.StrongFocus)
self.comboBox_11.setObjectName(_fromUtf8("comboBox_11"))
self.comboBox_11.addItem(_fromUtf8(""))
self.horizontalLayout_5.addWidget(self.comboBox_11)
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_5.addItem(spacerItem)
self.verticalLayout_4.addLayout(self.horizontalLayout_5)
self.horizontalLayout_12 = QtGui.QHBoxLayout()
self.horizontalLayout_12.setSpacing(1)
self.horizontalLayout_12.setContentsMargins(-1, 1, -1, 2)
self.horizontalLayout_12.setObjectName(_fromUtf8("horizontalLayout_12"))
self.comboBox_12 = QtGui.QComboBox(self.pdftab)
self.comboBox_12.setMinimumSize(QtCore.QSize(100, 0))
self.comboBox_12.setMaximumSize(QtCore.QSize(100, 23))
self.comboBox_12.setFocusPolicy(QtCore.Qt.StrongFocus)
self.comboBox_12.setObjectName(_fromUtf8("comboBox_12"))
self.comboBox_12.addItem(_fromUtf8(""))
self.horizontalLayout_12.addWidget(self.comboBox_12)
self.comboBox_13 = QtGui.QComboBox(self.pdftab)
self.comboBox_13.setMinimumSize(QtCore.QSize(100, 0))
self.comboBox_13.setMaximumSize(QtCore.QSize(100, 23))
self.comboBox_13.setFocusPolicy(QtCore.Qt.StrongFocus)
self.comboBox_13.setObjectName(_fromUtf8("comboBox_13"))
self.comboBox_13.addItem(_fromUtf8(""))
self.horizontalLayout_12.addWidget(self.comboBox_13)
self.comboBox_14 = QtGui.QComboBox(self.pdftab)
self.comboBox_14.setMinimumSize(QtCore.QSize(100, 0))
self.comboBox_14.setMaximumSize(QtCore.QSize(100, 23))
self.comboBox_14.setFocusPolicy(QtCore.Qt.StrongFocus)
self.comboBox_14.setObjectName(_fromUtf8("comboBox_14"))
self.comboBox_14.addItem(_fromUtf8(""))
self.horizontalLayout_12.addWidget(self.comboBox_14)
self.comboBox_8 = QtGui.QComboBox(self.pdftab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.comboBox_8.sizePolicy().hasHeightForWidth())
self.comboBox_8.setSizePolicy(sizePolicy)
self.comboBox_8.setMinimumSize(QtCore.QSize(100, 0))
self.comboBox_8.setMaximumSize(QtCore.QSize(100, 23))
self.comboBox_8.setFocusPolicy(QtCore.Qt.StrongFocus)
self.comboBox_8.setObjectName(_fromUtf8("comboBox_8"))
self.comboBox_8.addItem(_fromUtf8(""))
self.horizontalLayout_12.addWidget(self.comboBox_8)
self.comboBox_9 = QtGui.QComboBox(self.pdftab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.comboBox_9.sizePolicy().hasHeightForWidth())
self.comboBox_9.setSizePolicy(sizePolicy)
self.comboBox_9.setMinimumSize(QtCore.QSize(100, 0))
self.comboBox_9.setMaximumSize(QtCore.QSize(100, 23))
self.comboBox_9.setFocusPolicy(QtCore.Qt.StrongFocus)
self.comboBox_9.setObjectName(_fromUtf8("comboBox_9"))
self.comboBox_9.addItem(_fromUtf8(""))
self.horizontalLayout_12.addWidget(self.comboBox_9)
spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_12.addItem(spacerItem1)
self.checkBox_5 = QtGui.QCheckBox(self.pdftab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkBox_5.sizePolicy().hasHeightForWidth())
self.checkBox_5.setSizePolicy(sizePolicy)
self.checkBox_5.setMaximumSize(QtCore.QSize(16777215, 23))
self.checkBox_5.setFocusPolicy(QtCore.Qt.StrongFocus)
self.checkBox_5.setText(_fromUtf8(""))
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(_fromUtf8(userdir + userprogpath + SEP("icons/zoom-fit.png"))), QtGui.QIcon.Normal, QtGui.QIcon.On)
self.checkBox_5.setIcon(icon1)
self.checkBox_5.setObjectName(_fromUtf8("checkBox_5"))
self.horizontalLayout_12.addWidget(self.checkBox_5)
self.doubleSpinBox = QtGui.QDoubleSpinBox(self.pdftab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.doubleSpinBox.sizePolicy().hasHeightForWidth())
self.doubleSpinBox.setSizePolicy(sizePolicy)
self.doubleSpinBox.setMinimumSize(QtCore.QSize(0, 0))
self.doubleSpinBox.setMaximumSize(QtCore.QSize(16777215, 23))
self.doubleSpinBox.setPrefix(_fromUtf8(""))
self.doubleSpinBox.setSuffix(_fromUtf8(""))
self.doubleSpinBox.setDecimals(2)
self.doubleSpinBox.setMinimum(0.5)
self.doubleSpinBox.setMaximum(1.5)
self.doubleSpinBox.setSingleStep(0.05)
self.doubleSpinBox.setProperty("value", 1.0)
self.doubleSpinBox.setObjectName(_fromUtf8("doubleSpinBox"))
self.horizontalLayout_12.addWidget(self.doubleSpinBox)
self.pushButton_11 = QtGui.QPushButton(self.pdftab)
self.pushButton_11.setMaximumSize(QtCore.QSize(27, 23))
self.pushButton_11.setFocusPolicy(QtCore.Qt.StrongFocus)
self.pushButton_11.setText(_fromUtf8(""))
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(_fromUtf8(userdir + userprogpath + SEP("icons/go-up.png"))), QtGui.QIcon.Normal, QtGui.QIcon.On)
self.pushButton_11.setIcon(icon2)
self.pushButton_11.setFlat(True)
self.pushButton_11.setObjectName(_fromUtf8("pushButton_11"))
self.horizontalLayout_12.addWidget(self.pushButton_11)
self.pushButton_6 = QtGui.QPushButton(self.pdftab)
self.pushButton_6.setMaximumSize(QtCore.QSize(27, 23))
self.pushButton_6.setBaseSize(QtCore.QSize(0, 27))
self.pushButton_6.setFocusPolicy(QtCore.Qt.StrongFocus)
self.pushButton_6.setText(_fromUtf8(""))
icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap(_fromUtf8(userdir + userprogpath + SEP("icons/go-down.png"))), QtGui.QIcon.Normal, QtGui.QIcon.On)
self.pushButton_6.setIcon(icon3)
self.pushButton_6.setFlat(True)
self.pushButton_6.setObjectName(_fromUtf8("pushButton_6"))
self.horizontalLayout_12.addWidget(self.pushButton_6)
self.label_8 = QtGui.QLabel(self.pdftab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_8.sizePolicy().hasHeightForWidth())
self.label_8.setSizePolicy(sizePolicy)
self.label_8.setObjectName(_fromUtf8("label_8"))
self.horizontalLayout_12.addWidget(self.label_8)
self.verticalLayout_4.addLayout(self.horizontalLayout_12)
self.line_2 = QtGui.QFrame(self.pdftab)
self.line_2.setFrameShape(QtGui.QFrame.HLine)
self.line_2.setFrameShadow(QtGui.QFrame.Sunken)
self.line_2.setObjectName(_fromUtf8("line_2"))
self.verticalLayout_4.addWidget(self.line_2)
self.webView_2 = QtWebKit.QWebView(self.pdftab)
self.webView_2.setContextMenuPolicy(QtCore.Qt.NoContextMenu)
self.webView_2.setAutoFillBackground(False)
self.webView_2.setProperty("url", QtCore.QUrl(_fromUtf8("about:blank")))
self.webView_2.setObjectName(_fromUtf8("webView_2"))
self.verticalLayout_4.addWidget(self.webView_2)
self.tabWidget.addTab(self.pdftab, _fromUtf8(""))
self.Internettab = QtGui.QWidget()
self.Internettab.setObjectName(_fromUtf8("Internettab"))
self.verticalLayout_3 = QtGui.QVBoxLayout(self.Internettab)
self.verticalLayout_3.setSpacing(0)
self.verticalLayout_3.setMargin(0)
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setSpacing(1)
self.horizontalLayout_2.setContentsMargins(0, 2, 0, 1)
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.Intbuttonmaxima = QtGui.QPushButton(self.Internettab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Intbuttonmaxima.sizePolicy().hasHeightForWidth())
self.Intbuttonmaxima.setSizePolicy(sizePolicy)
self.Intbuttonmaxima.setMinimumSize(QtCore.QSize(0, 0))
self.Intbuttonmaxima.setMaximumSize(QtCore.QSize(16777215, 23))
self.Intbuttonmaxima.setFocusPolicy(QtCore.Qt.StrongFocus)
self.Intbuttonmaxima.setAcceptDrops(False)
self.Intbuttonmaxima.setCheckable(True)
self.Intbuttonmaxima.setChecked(False)
self.Intbuttonmaxima.setFlat(False)
self.Intbuttonmaxima.setObjectName(_fromUtf8("Intbuttonmaxima"))
self.horizontalLayout_2.addWidget(self.Intbuttonmaxima)
self.Intbuttonnorfa = QtGui.QPushButton(self.Internettab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Intbuttonnorfa.sizePolicy().hasHeightForWidth())
self.Intbuttonnorfa.setSizePolicy(sizePolicy)
self.Intbuttonnorfa.setMinimumSize(QtCore.QSize(0, 0))
self.Intbuttonnorfa.setMaximumSize(QtCore.QSize(16777215, 23))
self.Intbuttonnorfa.setFocusPolicy(QtCore.Qt.StrongFocus)
self.Intbuttonnorfa.setCheckable(True)
self.Intbuttonnorfa.setFlat(False)
self.Intbuttonnorfa.setObjectName(_fromUtf8("Intbuttonnorfa"))
self.horizontalLayout_2.addWidget(self.Intbuttonnorfa)
self.Intbuttoniki = QtGui.QPushButton(self.Internettab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Intbuttoniki.sizePolicy().hasHeightForWidth())
self.Intbuttoniki.setSizePolicy(sizePolicy)
self.Intbuttoniki.setMinimumSize(QtCore.QSize(0, 0))
self.Intbuttoniki.setMaximumSize(QtCore.QSize(16777215, 23))
self.Intbuttoniki.setFocusPolicy(QtCore.Qt.StrongFocus)
self.Intbuttoniki.setCheckable(True)
self.Intbuttoniki.setFlat(False)
self.Intbuttoniki.setObjectName(_fromUtf8("Intbuttoniki"))
self.horizontalLayout_2.addWidget(self.Intbuttoniki)
self.Intbuttonrimi = QtGui.QPushButton(self.Internettab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Intbuttonrimi.sizePolicy().hasHeightForWidth())
self.Intbuttonrimi.setSizePolicy(sizePolicy)
self.Intbuttonrimi.setMinimumSize(QtCore.QSize(0, 0))
self.Intbuttonrimi.setMaximumSize(QtCore.QSize(16777215, 23))
self.Intbuttonrimi.setFocusPolicy(QtCore.Qt.StrongFocus)
self.Intbuttonrimi.setCheckable(True)
self.Intbuttonrimi.setFlat(False)
self.Intbuttonrimi.setObjectName(_fromUtf8("Intbuttonrimi"))
self.horizontalLayout_2.addWidget(self.Intbuttonrimi)
self.intbuttonaibe = QtGui.QPushButton(self.Internettab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.intbuttonaibe.sizePolicy().hasHeightForWidth())
self.intbuttonaibe.setSizePolicy(sizePolicy)
self.intbuttonaibe.setMinimumSize(QtCore.QSize(0, 0))
self.intbuttonaibe.setMaximumSize(QtCore.QSize(16777215, 23))
self.intbuttonaibe.setFocusPolicy(QtCore.Qt.StrongFocus)
self.intbuttonaibe.setCheckable(True)
self.intbuttonaibe.setObjectName(_fromUtf8("intbuttonaibe"))
self.horizontalLayout_2.addWidget(self.intbuttonaibe)
self.intbuttonFRESH_MARKET = QtGui.QPushButton(self.Internettab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.intbuttonFRESH_MARKET.sizePolicy().hasHeightForWidth())
self.intbuttonFRESH_MARKET.setSizePolicy(sizePolicy)
self.intbuttonFRESH_MARKET.setMinimumSize(QtCore.QSize(0, 0))
self.intbuttonFRESH_MARKET.setMaximumSize(QtCore.QSize(16777215, 23))
self.intbuttonFRESH_MARKET.setFocusPolicy(QtCore.Qt.StrongFocus)
self.intbuttonFRESH_MARKET.setCheckable(True)
self.intbuttonFRESH_MARKET.setObjectName(_fromUtf8("intbuttonFRESH_MARKET"))
self.horizontalLayout_2.addWidget(self.intbuttonFRESH_MARKET)
self.intbuttonPROMO = QtGui.QPushButton(self.Internettab)
self.intbuttonPROMO.setMaximumSize(QtCore.QSize(16777215, 23))
self.intbuttonPROMO.setFocusPolicy(QtCore.Qt.StrongFocus)
self.intbuttonPROMO.setCheckable(True)
self.intbuttonPROMO.setObjectName(_fromUtf8("intbuttonPROMO"))
self.horizontalLayout_2.addWidget(self.intbuttonPROMO)
self.intbuttonPRISMA = QtGui.QPushButton(self.Internettab)
self.intbuttonPRISMA.setMaximumSize(QtCore.QSize(16777215, 23))
self.intbuttonPRISMA.setFocusPolicy(QtCore.Qt.StrongFocus)
self.intbuttonPRISMA.setCheckable(True)
self.intbuttonPRISMA.setObjectName(_fromUtf8("intbuttonPRISMA"))
self.horizontalLayout_2.addWidget(self.intbuttonPRISMA)
spacerItem2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem2)
self.pushButton_9 = QtGui.QPushButton(self.Internettab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_9.sizePolicy().hasHeightForWidth())
self.pushButton_9.setSizePolicy(sizePolicy)
self.pushButton_9.setMinimumSize(QtCore.QSize(0, 0))
self.pushButton_9.setMaximumSize(QtCore.QSize(16777215, 23))
font = QtGui.QFont()
font.setStyleStrategy(QtGui.QFont.PreferDefault)
self.pushButton_9.setFont(font)
self.pushButton_9.setFocusPolicy(QtCore.Qt.StrongFocus)
self.pushButton_9.setAcceptDrops(True)
self.pushButton_9.setText(_fromUtf8(""))
icon4 = QtGui.QIcon()
icon4.addPixmap(QtGui.QPixmap(_fromUtf8(userdir + userprogpath + SEP("icons/user-trash.png"))), QtGui.QIcon.Normal, QtGui.QIcon.On)
self.pushButton_9.setIcon(icon4)
self.pushButton_9.setIconSize(QtCore.QSize(24, 24))
self.pushButton_9.setAutoRepeat(False)
self.pushButton_9.setFlat(True)
self.pushButton_9.setObjectName(_fromUtf8("pushButton_9"))
self.horizontalLayout_2.addWidget(self.pushButton_9)
self.verticalLayout_3.addLayout(self.horizontalLayout_2)
self.horizontalLayout_17 = QtGui.QHBoxLayout()
self.horizontalLayout_17.setSpacing(1)
self.horizontalLayout_17.setContentsMargins(-1, 1, -1, 1)
self.horizontalLayout_17.setObjectName(_fromUtf8("horizontalLayout_17"))
self.intbuttonEUROKOS = QtGui.QPushButton(self.Internettab)
self.intbuttonEUROKOS.setMaximumSize(QtCore.QSize(16777215, 23))
self.intbuttonEUROKOS.setFocusPolicy(QtCore.Qt.StrongFocus)
self.intbuttonEUROKOS.setCheckable(True)
self.intbuttonEUROKOS.setObjectName(_fromUtf8("intbuttonEUROKOS"))
self.horizontalLayout_17.addWidget(self.intbuttonEUROKOS)
self.intbuttonDrogas = QtGui.QPushButton(self.Internettab)
self.intbuttonDrogas.setMaximumSize(QtCore.QSize(16777215, 23))
self.intbuttonDrogas.setFocusPolicy(QtCore.Qt.StrongFocus)
self.intbuttonDrogas.setCheckable(True)
self.intbuttonDrogas.setObjectName(_fromUtf8("intbuttonDrogas"))
self.horizontalLayout_17.addWidget(self.intbuttonDrogas)
self.intbuttonERMITAZAS = QtGui.QPushButton(self.Internettab)
self.intbuttonERMITAZAS.setMaximumSize(QtCore.QSize(16777215, 23))
self.intbuttonERMITAZAS.setFocusPolicy(QtCore.Qt.StrongFocus)
self.intbuttonERMITAZAS.setCheckable(True)
self.intbuttonERMITAZAS.setObjectName(_fromUtf8("intbuttonERMITAZAS"))
self.horizontalLayout_17.addWidget(self.intbuttonERMITAZAS)
self.intbuttonSenukai = QtGui.QPushButton(self.Internettab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.intbuttonSenukai.sizePolicy().hasHeightForWidth())
self.intbuttonSenukai.setSizePolicy(sizePolicy)
self.intbuttonSenukai.setMinimumSize(QtCore.QSize(0, 0))
self.intbuttonSenukai.setMaximumSize(QtCore.QSize(16777215, 23))
self.intbuttonSenukai.setFocusPolicy(QtCore.Qt.StrongFocus)
self.intbuttonSenukai.setCheckable(True)
self.intbuttonSenukai.setObjectName(_fromUtf8("intbuttonSenukai"))
self.horizontalLayout_17.addWidget(self.intbuttonSenukai)
self.intbuttonMoki_Vezi = QtGui.QPushButton(self.Internettab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.intbuttonMoki_Vezi.sizePolicy().hasHeightForWidth())
self.intbuttonMoki_Vezi.setSizePolicy(sizePolicy)
self.intbuttonMoki_Vezi.setMinimumSize(QtCore.QSize(0, 0))
self.intbuttonMoki_Vezi.setMaximumSize(QtCore.QSize(16777215, 23))
self.intbuttonMoki_Vezi.setFocusPolicy(QtCore.Qt.StrongFocus)
self.intbuttonMoki_Vezi.setCheckable(True)
self.intbuttonMoki_Vezi.setObjectName(_fromUtf8("intbuttonMoki_Vezi"))
self.horizontalLayout_17.addWidget(self.intbuttonMoki_Vezi)
self.intbuttonJysk = QtGui.QPushButton(self.Internettab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.intbuttonJysk.sizePolicy().hasHeightForWidth())
self.intbuttonJysk.setSizePolicy(sizePolicy)
self.intbuttonJysk.setMinimumSize(QtCore.QSize(0, 0))
self.intbuttonJysk.setMaximumSize(QtCore.QSize(16777215, 23))
self.intbuttonJysk.setFocusPolicy(QtCore.Qt.StrongFocus)
self.intbuttonJysk.setCheckable(True)
self.intbuttonJysk.setObjectName(_fromUtf8("intbuttonJysk"))
self.horizontalLayout_17.addWidget(self.intbuttonJysk)
spacerItem3 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_17.addItem(spacerItem3)
self.comboBox = QtGui.QComboBox(self.Internettab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.comboBox.sizePolicy().hasHeightForWidth())
self.comboBox.setSizePolicy(sizePolicy)
self.comboBox.setMinimumSize(QtCore.QSize(200, 0))
self.comboBox.setMaximumSize(QtCore.QSize(200, 23))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Sans Serif"))
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.comboBox.setFont(font)
self.comboBox.setFocusPolicy(QtCore.Qt.StrongFocus)
self.comboBox.setAcceptDrops(True)
self.comboBox.setEditable(False)
self.comboBox.setMaxVisibleItems(20)
self.comboBox.setFrame(False)
self.comboBox.setObjectName(_fromUtf8("comboBox"))
self.horizontalLayout_17.addWidget(self.comboBox)
self.verticalLayout_3.addLayout(self.horizontalLayout_17)
self.horizontalLayout_4 = QtGui.QHBoxLayout()
self.horizontalLayout_4.setSpacing(1)
self.horizontalLayout_4.setContentsMargins(-1, 1, -1, 2)
self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
self.pushButton_5 = QtGui.QPushButton(self.Internettab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_5.sizePolicy().hasHeightForWidth())
self.pushButton_5.setSizePolicy(sizePolicy)
self.pushButton_5.setMinimumSize(QtCore.QSize(0, 0))
self.pushButton_5.setMaximumSize(QtCore.QSize(16777215, 23))
self.pushButton_5.setMouseTracking(False)
self.pushButton_5.setFocusPolicy(QtCore.Qt.StrongFocus)
self.pushButton_5.setAutoFillBackground(False)
self.pushButton_5.setText(_fromUtf8(""))
icon5 = QtGui.QIcon()
icon5.addPixmap(QtGui.QPixmap(_fromUtf8(userdir + userprogpath + SEP("icons/go-previous.png"))), QtGui.QIcon.Normal, QtGui.QIcon.On)
self.pushButton_5.setIcon(icon5)
self.pushButton_5.setIconSize(QtCore.QSize(24, 24))
self.pushButton_5.setShortcut(_fromUtf8(""))
self.pushButton_5.setAutoExclusive(False)
self.pushButton_5.setAutoDefault(False)
self.pushButton_5.setDefault(False)
self.pushButton_5.setFlat(True)
self.pushButton_5.setObjectName(_fromUtf8("pushButton_5"))
self.horizontalLayout_4.addWidget(self.pushButton_5)
self.pushButton_4 = QtGui.QPushButton(self.Internettab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_4.sizePolicy().hasHeightForWidth())
self.pushButton_4.setSizePolicy(sizePolicy)
self.pushButton_4.setMinimumSize(QtCore.QSize(0, 0))
self.pushButton_4.setMaximumSize(QtCore.QSize(16777215, 23))
self.pushButton_4.setMouseTracking(False)
self.pushButton_4.setFocusPolicy(QtCore.Qt.StrongFocus)
self.pushButton_4.setText(_fromUtf8(""))
icon6 = QtGui.QIcon()
icon6.addPixmap(QtGui.QPixmap(_fromUtf8(userdir + userprogpath + SEP("icons/go-next.png"))), QtGui.QIcon.Normal, QtGui.QIcon.On)
self.pushButton_4.setIcon(icon6)
self.pushButton_4.setIconSize(QtCore.QSize(24, 24))
self.pushButton_4.setCheckable(False)
self.pushButton_4.setFlat(True)
self.pushButton_4.setObjectName(_fromUtf8("pushButton_4"))
self.horizontalLayout_4.addWidget(self.pushButton_4)
self.pushButton_3 = QtGui.QPushButton(self.Internettab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_3.sizePolicy().hasHeightForWidth())
self.pushButton_3.setSizePolicy(sizePolicy)
self.pushButton_3.setMinimumSize(QtCore.QSize(0, 0))
self.pushButton_3.setMaximumSize(QtCore.QSize(16777215, 23))
self.pushButton_3.setMouseTracking(False)
self.pushButton_3.setFocusPolicy(QtCore.Qt.StrongFocus)
icon7 = QtGui.QIcon()
icon7.addPixmap(QtGui.QPixmap(_fromUtf8(userdir + userprogpath + SEP("icons/process-stop.png"))), QtGui.QIcon.Normal, QtGui.QIcon.On)
self.pushButton_3.setIcon(icon7)
self.pushButton_3.setIconSize(QtCore.QSize(24, 24))
self.pushButton_3.setFlat(True)
self.pushButton_3.setObjectName(_fromUtf8("pushButton_3"))
self.horizontalLayout_4.addWidget(self.pushButton_3)
self.pushButton = QtGui.QPushButton(self.Internettab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton.sizePolicy().hasHeightForWidth())
self.pushButton.setSizePolicy(sizePolicy)
self.pushButton.setMinimumSize(QtCore.QSize(0, 0))
self.pushButton.setMaximumSize(QtCore.QSize(16777215, 23))
self.pushButton.setMouseTracking(False)
self.pushButton.setFocusPolicy(QtCore.Qt.StrongFocus)
icon8 = QtGui.QIcon()
icon8.addPixmap(QtGui.QPixmap(_fromUtf8(userdir + userprogpath + SEP("icons/view-refresh.png"))), QtGui.QIcon.Normal, QtGui.QIcon.On)
self.pushButton.setIcon(icon8)
self.pushButton.setIconSize(QtCore.QSize(24, 24))
self.pushButton.setFlat(True)
self.pushButton.setObjectName(_fromUtf8("pushButton"))
self.horizontalLayout_4.addWidget(self.pushButton)
self.pushButton_22 = QtGui.QPushButton(self.Internettab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_22.sizePolicy().hasHeightForWidth())
self.pushButton_22.setSizePolicy(sizePolicy)
self.pushButton_22.setMinimumSize(QtCore.QSize(0, 0))
self.pushButton_22.setMaximumSize(QtCore.QSize(16777215, 23))
self.pushButton_22.setMouseTracking(False)
self.pushButton_22.setFocusPolicy(QtCore.Qt.StrongFocus)
icon9 = QtGui.QIcon()
icon9.addPixmap(QtGui.QPixmap(_fromUtf8(userdir + userprogpath + SEP("icons/go-home.png"))), QtGui.QIcon.Normal, QtGui.QIcon.On)
self.pushButton_22.setIcon(icon9)
self.pushButton_22.setIconSize(QtCore.QSize(24, 24))
self.pushButton_22.setFlat(True)
self.pushButton_22.setObjectName(_fromUtf8("pushButton_22"))
self.horizontalLayout_4.addWidget(self.pushButton_22)
self.lineEdit = QtGui.QLineEdit(self.Internettab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lineEdit.sizePolicy().hasHeightForWidth())
self.lineEdit.setSizePolicy(sizePolicy)
self.lineEdit.setMinimumSize(QtCore.QSize(360, 0))
self.lineEdit.setMaximumSize(QtCore.QSize(16777215, 23))
font = QtGui.QFont()
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.lineEdit.setFont(font)
self.lineEdit.setMouseTracking(False)
self.lineEdit.setFocusPolicy(QtCore.Qt.StrongFocus)
self.lineEdit.setDragEnabled(True)
self.lineEdit.setCursorMoveStyle(QtCore.Qt.LogicalMoveStyle)
self.lineEdit.setObjectName(_fromUtf8("lineEdit"))
self.horizontalLayout_4.addWidget(self.lineEdit)
self.pushButton_2 = QtGui.QPushButton(self.Internettab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_2.sizePolicy().hasHeightForWidth())
self.pushButton_2.setSizePolicy(sizePolicy)
self.pushButton_2.setMinimumSize(QtCore.QSize(0, 0))
self.pushButton_2.setMaximumSize(QtCore.QSize(16777215, 23))
self.pushButton_2.setMouseTracking(True)
self.pushButton_2.setFocusPolicy(QtCore.Qt.StrongFocus)
self.pushButton_2.setText(_fromUtf8(""))
icon10 = QtGui.QIcon()
icon10.addPixmap(QtGui.QPixmap(_fromUtf8(userdir + userprogpath + SEP("icons/go-jump.png"))), QtGui.QIcon.Normal, QtGui.QIcon.On)
self.pushButton_2.setIcon(icon10)
self.pushButton_2.setIconSize(QtCore.QSize(24, 24))
self.pushButton_2.setFlat(True)
self.pushButton_2.setObjectName(_fromUtf8("pushButton_2"))
self.horizontalLayout_4.addWidget(self.pushButton_2)
spacerItem4 = QtGui.QSpacerItem(40, 23, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_4.addItem(spacerItem4)
self.pushButton_12 = QtGui.QPushButton(self.Internettab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_12.sizePolicy().hasHeightForWidth())
self.pushButton_12.setSizePolicy(sizePolicy)
self.pushButton_12.setMaximumSize(QtCore.QSize(16777215, 23))
self.pushButton_12.setFocusPolicy(QtCore.Qt.StrongFocus)
self.pushButton_12.setObjectName(_fromUtf8("pushButton_12"))
self.horizontalLayout_4.addWidget(self.pushButton_12)
self.progressBar_2 = QtGui.QProgressBar(self.Internettab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.progressBar_2.sizePolicy().hasHeightForWidth())
self.progressBar_2.setSizePolicy(sizePolicy)
self.progressBar_2.setMinimumSize(QtCore.QSize(0, 0))
self.progressBar_2.setMaximumSize(QtCore.QSize(16777215, 23))
self.progressBar_2.setProperty("value", 0)
self.progressBar_2.setObjectName(_fromUtf8("progressBar_2"))
self.horizontalLayout_4.addWidget(self.progressBar_2)
self.verticalLayout_3.addLayout(self.horizontalLayout_4)
self.line_3 = QtGui.QFrame(self.Internettab)
self.line_3.setFrameShape(QtGui.QFrame.HLine)
self.line_3.setFrameShadow(QtGui.QFrame.Sunken)
self.line_3.setObjectName(_fromUtf8("line_3"))
self.verticalLayout_3.addWidget(self.line_3)
self.webView = QtWebKit.QWebView(self.Internettab)
font = QtGui.QFont()
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.webView.setFont(font)
self.webView.setMouseTracking(False)
self.webView.setProperty("url", QtCore.QUrl(_fromUtf8("about:blank")))
self.webView.setObjectName(_fromUtf8("webView"))
self.verticalLayout_3.addWidget(self.webView)
self.tabWidget.addTab(self.Internettab, _fromUtf8(""))
self.tab = QtGui.QWidget()
self.tab.setObjectName(_fromUtf8("tab"))
self.verticalLayout = QtGui.QVBoxLayout(self.tab)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.verticalLayout_11 = QtGui.QVBoxLayout()
self.verticalLayout_11.setSpacing(2)
self.verticalLayout_11.setContentsMargins(-1, 0, 0, -1)
self.verticalLayout_11.setObjectName(_fromUtf8("verticalLayout_11"))
self.line_4 = QtGui.QFrame(self.tab)
self.line_4.setFrameShape(QtGui.QFrame.HLine)
self.line_4.setFrameShadow(QtGui.QFrame.Sunken)
self.line_4.setObjectName(_fromUtf8("line_4"))
self.verticalLayout_11.addWidget(self.line_4)
self.horizontalLayout_15 = QtGui.QHBoxLayout()
self.horizontalLayout_15.setContentsMargins(-1, 0, -1, 0)
self.horizontalLayout_15.setObjectName(_fromUtf8("horizontalLayout_15"))
self.label_3 = QtGui.QLabel(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_3.sizePolicy().hasHeightForWidth())
self.label_3.setSizePolicy(sizePolicy)
self.label_3.setTextFormat(QtCore.Qt.RichText)
self.label_3.setScaledContents(True)
self.label_3.setAlignment(QtCore.Qt.AlignCenter)
self.label_3.setWordWrap(False)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.horizontalLayout_15.addWidget(self.label_3)
spacerItem5 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_15.addItem(spacerItem5)
self.verticalLayout_11.addLayout(self.horizontalLayout_15)
self.horizontalLayout_14 = QtGui.QHBoxLayout()
self.horizontalLayout_14.setSpacing(2)
self.horizontalLayout_14.setSizeConstraint(QtGui.QLayout.SetDefaultConstraint)
self.horizontalLayout_14.setMargin(0)
self.horizontalLayout_14.setObjectName(_fromUtf8("horizontalLayout_14"))
self.verticalLayout_7 = QtGui.QVBoxLayout()
self.verticalLayout_7.setSpacing(2)
self.verticalLayout_7.setSizeConstraint(QtGui.QLayout.SetDefaultConstraint)
self.verticalLayout_7.setMargin(0)
self.verticalLayout_7.setObjectName(_fromUtf8("verticalLayout_7"))
self.checkboxmaxima = QtGui.QCheckBox(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkboxmaxima.sizePolicy().hasHeightForWidth())
self.checkboxmaxima.setSizePolicy(sizePolicy)
self.checkboxmaxima.setMinimumSize(QtCore.QSize(0, 0))
self.checkboxmaxima.setFocusPolicy(QtCore.Qt.StrongFocus)
self.checkboxmaxima.setLayoutDirection(QtCore.Qt.LeftToRight)
self.checkboxmaxima.setAutoFillBackground(False)
self.checkboxmaxima.setChecked(False)
self.checkboxmaxima.setTristate(False)
self.checkboxmaxima.setObjectName(_fromUtf8("checkboxmaxima"))
self.verticalLayout_7.addWidget(self.checkboxmaxima)
self.checkBoxnorfa = QtGui.QCheckBox(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkBoxnorfa.sizePolicy().hasHeightForWidth())
self.checkBoxnorfa.setSizePolicy(sizePolicy)
self.checkBoxnorfa.setFocusPolicy(QtCore.Qt.StrongFocus)
self.checkBoxnorfa.setChecked(False)
self.checkBoxnorfa.setObjectName(_fromUtf8("checkBoxnorfa"))
self.verticalLayout_7.addWidget(self.checkBoxnorfa)
self.checkBoxiki = QtGui.QCheckBox(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkBoxiki.sizePolicy().hasHeightForWidth())
self.checkBoxiki.setSizePolicy(sizePolicy)
self.checkBoxiki.setFocusPolicy(QtCore.Qt.StrongFocus)
self.checkBoxiki.setChecked(False)
self.checkBoxiki.setObjectName(_fromUtf8("checkBoxiki"))
self.verticalLayout_7.addWidget(self.checkBoxiki)
self.checkBoxrimi = QtGui.QCheckBox(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkBoxrimi.sizePolicy().hasHeightForWidth())
self.checkBoxrimi.setSizePolicy(sizePolicy)
self.checkBoxrimi.setFocusPolicy(QtCore.Qt.StrongFocus)
self.checkBoxrimi.setChecked(False)
self.checkBoxrimi.setObjectName(_fromUtf8("checkBoxrimi"))
self.verticalLayout_7.addWidget(self.checkBoxrimi)
self.checkboxAibe = QtGui.QCheckBox(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkboxAibe.sizePolicy().hasHeightForWidth())
self.checkboxAibe.setSizePolicy(sizePolicy)
self.checkboxAibe.setFocusPolicy(QtCore.Qt.StrongFocus)
self.checkboxAibe.setObjectName(_fromUtf8("checkboxAibe"))
self.verticalLayout_7.addWidget(self.checkboxAibe)
self.checkboxFRESH_MARKET = QtGui.QCheckBox(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkboxFRESH_MARKET.sizePolicy().hasHeightForWidth())
self.checkboxFRESH_MARKET.setSizePolicy(sizePolicy)
self.checkboxFRESH_MARKET.setFocusPolicy(QtCore.Qt.StrongFocus)
self.checkboxFRESH_MARKET.setObjectName(_fromUtf8("checkboxFRESH_MARKET"))
self.verticalLayout_7.addWidget(self.checkboxFRESH_MARKET)
self.checkboxPROMO = QtGui.QCheckBox(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkboxPROMO.sizePolicy().hasHeightForWidth())
self.checkboxPROMO.setSizePolicy(sizePolicy)
self.checkboxPROMO.setFocusPolicy(QtCore.Qt.StrongFocus)
self.checkboxPROMO.setObjectName(_fromUtf8("checkboxPROMO"))
self.verticalLayout_7.addWidget(self.checkboxPROMO)
self.checkboxPRISMA = QtGui.QCheckBox(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkboxPRISMA.sizePolicy().hasHeightForWidth())
self.checkboxPRISMA.setSizePolicy(sizePolicy)
self.checkboxPRISMA.setFocusPolicy(QtCore.Qt.StrongFocus)
self.checkboxPRISMA.setObjectName(_fromUtf8("checkboxPRISMA"))
self.verticalLayout_7.addWidget(self.checkboxPRISMA)
self.checkboxEUROKOS = QtGui.QCheckBox(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkboxEUROKOS.sizePolicy().hasHeightForWidth())
self.checkboxEUROKOS.setSizePolicy(sizePolicy)
self.checkboxEUROKOS.setFocusPolicy(QtCore.Qt.StrongFocus)
self.checkboxEUROKOS.setObjectName(_fromUtf8("checkboxEUROKOS"))
self.verticalLayout_7.addWidget(self.checkboxEUROKOS)
self.checkboxDrogas = QtGui.QCheckBox(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkboxDrogas.sizePolicy().hasHeightForWidth())
self.checkboxDrogas.setSizePolicy(sizePolicy)
self.checkboxDrogas.setFocusPolicy(QtCore.Qt.StrongFocus)
self.checkboxDrogas.setObjectName(_fromUtf8("checkboxDrogas"))
self.verticalLayout_7.addWidget(self.checkboxDrogas)
self.checkboxERMITAZAS = QtGui.QCheckBox(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkboxERMITAZAS.sizePolicy().hasHeightForWidth())
self.checkboxERMITAZAS.setSizePolicy(sizePolicy)
self.checkboxERMITAZAS.setFocusPolicy(QtCore.Qt.StrongFocus)
self.checkboxERMITAZAS.setObjectName(_fromUtf8("checkboxERMITAZAS"))
self.verticalLayout_7.addWidget(self.checkboxERMITAZAS)
self.checkboxSenukai = QtGui.QCheckBox(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkboxSenukai.sizePolicy().hasHeightForWidth())
self.checkboxSenukai.setSizePolicy(sizePolicy)
self.checkboxSenukai.setFocusPolicy(QtCore.Qt.StrongFocus)
self.checkboxSenukai.setObjectName(_fromUtf8("checkboxSenukai"))
self.verticalLayout_7.addWidget(self.checkboxSenukai)
self.checkboxMoki_Vezi = QtGui.QCheckBox(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkboxMoki_Vezi.sizePolicy().hasHeightForWidth())
self.checkboxMoki_Vezi.setSizePolicy(sizePolicy)
self.checkboxMoki_Vezi.setFocusPolicy(QtCore.Qt.StrongFocus)
self.checkboxMoki_Vezi.setObjectName(_fromUtf8("checkboxMoki_Vezi"))
self.verticalLayout_7.addWidget(self.checkboxMoki_Vezi)
self.horizontalLayout_14.addLayout(self.verticalLayout_7)
spacerItem6 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_14.addItem(spacerItem6)
self.verticalLayout_11.addLayout(self.horizontalLayout_14)
self.horizontalLayout_8 = QtGui.QHBoxLayout()
self.horizontalLayout_8.setSpacing(0)
self.horizontalLayout_8.setContentsMargins(-1, 0, -1, 0)
self.horizontalLayout_8.setObjectName(_fromUtf8("horizontalLayout_8"))
self.pushButtondownloadpdf = QtGui.QPushButton(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButtondownloadpdf.sizePolicy().hasHeightForWidth())
self.pushButtondownloadpdf.setSizePolicy(sizePolicy)
self.pushButtondownloadpdf.setMinimumSize(QtCore.QSize(85, 0))
self.pushButtondownloadpdf.setFocusPolicy(QtCore.Qt.StrongFocus)
self.pushButtondownloadpdf.setObjectName(_fromUtf8("pushButtondownloadpdf"))
self.horizontalLayout_8.addWidget(self.pushButtondownloadpdf)
spacerItem7 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_8.addItem(spacerItem7)
self.verticalLayout_11.addLayout(self.horizontalLayout_8)
self.horizontalLayout_6 = QtGui.QHBoxLayout()
self.horizontalLayout_6.setSpacing(0)
self.horizontalLayout_6.setContentsMargins(-1, 2, -1, 2)
self.horizontalLayout_6.setObjectName(_fromUtf8("horizontalLayout_6"))
self.checkBox_4 = QtGui.QCheckBox(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkBox_4.sizePolicy().hasHeightForWidth())
self.checkBox_4.setSizePolicy(sizePolicy)
self.checkBox_4.setBaseSize(QtCore.QSize(0, 0))
self.checkBox_4.setFocusPolicy(QtCore.Qt.StrongFocus)
self.checkBox_4.setObjectName(_fromUtf8("checkBox_4"))
self.horizontalLayout_6.addWidget(self.checkBox_4)
self.spinBox_3 = QtGui.QSpinBox(self.tab)
self.spinBox_3.setMaximum(30)
self.spinBox_3.setProperty("value", 1)
self.spinBox_3.setObjectName(_fromUtf8("spinBox_3"))
self.horizontalLayout_6.addWidget(self.spinBox_3)
self.label_5 = QtGui.QLabel(self.tab)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.horizontalLayout_6.addWidget(self.label_5)
spacerItem8 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_6.addItem(spacerItem8)
self.verticalLayout_11.addLayout(self.horizontalLayout_6)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setSpacing(0)
self.horizontalLayout_3.setContentsMargins(-1, 2, -1, 2)
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.checkBox_3 = QtGui.QCheckBox(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkBox_3.sizePolicy().hasHeightForWidth())
self.checkBox_3.setSizePolicy(sizePolicy)
self.checkBox_3.setFocusPolicy(QtCore.Qt.StrongFocus)
self.checkBox_3.setChecked(False)
self.checkBox_3.setObjectName(_fromUtf8("checkBox_3"))
self.horizontalLayout_3.addWidget(self.checkBox_3)
self.spinBox = QtGui.QSpinBox(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.spinBox.sizePolicy().hasHeightForWidth())
self.spinBox.setSizePolicy(sizePolicy)
self.spinBox.setMinimum(5)
self.spinBox.setMaximum(365)
self.spinBox.setSingleStep(5)
self.spinBox.setProperty("value", 180)
self.spinBox.setObjectName(_fromUtf8("spinBox"))
self.horizontalLayout_3.addWidget(self.spinBox)
self.label_2 = QtGui.QLabel(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_2.sizePolicy().hasHeightForWidth())
self.label_2.setSizePolicy(sizePolicy)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.horizontalLayout_3.addWidget(self.label_2)
self.pushButton_8 = QtGui.QPushButton(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_8.sizePolicy().hasHeightForWidth())
self.pushButton_8.setSizePolicy(sizePolicy)
self.pushButton_8.setFocusPolicy(QtCore.Qt.StrongFocus)
self.pushButton_8.setObjectName(_fromUtf8("pushButton_8"))
self.horizontalLayout_3.addWidget(self.pushButton_8)
spacerItem9 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_3.addItem(spacerItem9)
self.verticalLayout_11.addLayout(self.horizontalLayout_3)
self.line = QtGui.QFrame(self.tab)
self.line.setFrameShape(QtGui.QFrame.HLine)
self.line.setFrameShadow(QtGui.QFrame.Sunken)
self.line.setObjectName(_fromUtf8("line"))
self.verticalLayout_11.addWidget(self.line)
self.verticalLayout.addLayout(self.verticalLayout_11)
self.horizontalLayout_16 = QtGui.QHBoxLayout()
self.horizontalLayout_16.setContentsMargins(-1, 0, -1, 0)
self.horizontalLayout_16.setObjectName(_fromUtf8("horizontalLayout_16"))
self.label = QtGui.QLabel(self.tab)
self.label.setObjectName(_fromUtf8("label"))
self.horizontalLayout_16.addWidget(self.label)
spacerItem10 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_16.addItem(spacerItem10)
self.verticalLayout.addLayout(self.horizontalLayout_16)
self.horizontalLayout_7 = QtGui.QHBoxLayout()
self.horizontalLayout_7.setSpacing(0)
self.horizontalLayout_7.setContentsMargins(0, 2, 0, 2)
self.horizontalLayout_7.setObjectName(_fromUtf8("horizontalLayout_7"))
self.checkBox_2 = QtGui.QCheckBox(self.tab)
self.checkBox_2.setFocusPolicy(QtCore.Qt.StrongFocus)
self.checkBox_2.setChecked(False)
self.checkBox_2.setObjectName(_fromUtf8("checkBox_2"))
self.horizontalLayout_7.addWidget(self.checkBox_2)
self.spinBox_2 = QtGui.QSpinBox(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.spinBox_2.sizePolicy().hasHeightForWidth())
self.spinBox_2.setSizePolicy(sizePolicy)
self.spinBox_2.setMinimum(0)
self.spinBox_2.setMaximum(30)
self.spinBox_2.setProperty("value", 1)
self.spinBox_2.setObjectName(_fromUtf8("spinBox_2"))
self.horizontalLayout_7.addWidget(self.spinBox_2)
self.label_4 = QtGui.QLabel(self.tab)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.horizontalLayout_7.addWidget(self.label_4)
self.pushButton_7 = QtGui.QPushButton(self.tab)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_7.sizePolicy().hasHeightForWidth())
self.pushButton_7.setSizePolicy(sizePolicy)
self.pushButton_7.setFocusPolicy(QtCore.Qt.StrongFocus)
self.pushButton_7.setObjectName(_fromUtf8("pushButton_7"))
self.horizontalLayout_7.addWidget(self.pushButton_7)
spacerItem11 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_7.addItem(spacerItem11)
self.verticalLayout.addLayout(self.horizontalLayout_7)
self.line_6 = QtGui.QFrame(self.tab)
self.line_6.setFrameShape(QtGui.QFrame.HLine)
self.line_6.setFrameShadow(QtGui.QFrame.Sunken)
self.line_6.setObjectName(_fromUtf8("line_6"))
self.verticalLayout.addWidget(self.line_6)
spacerItem12 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout.addItem(spacerItem12)
self.plainTextEdit = QtGui.QPlainTextEdit(self.tab)
self.plainTextEdit.setFocusPolicy(QtCore.Qt.NoFocus)
self.plainTextEdit.setContextMenuPolicy(QtCore.Qt.NoContextMenu)
self.plainTextEdit.setAcceptDrops(False)
self.plainTextEdit.setAutoFillBackground(True)
self.plainTextEdit.setFrameShape(QtGui.QFrame.StyledPanel)
self.plainTextEdit.setFrameShadow(QtGui.QFrame.Plain)
self.plainTextEdit.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
self.plainTextEdit.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
self.plainTextEdit.setUndoRedoEnabled(True)
self.plainTextEdit.setLineWrapMode(QtGui.QPlainTextEdit.NoWrap)
self.plainTextEdit.setReadOnly(True)
self.plainTextEdit.setBackgroundVisible(False)
self.plainTextEdit.setObjectName(_fromUtf8("plainTextEdit"))
self.verticalLayout.addWidget(self.plainTextEdit)
self.progressBar = QtGui.QProgressBar(self.tab)
self.progressBar.setProperty("value", 0)
self.progressBar.setTextDirection(QtGui.QProgressBar.TopToBottom)
self.progressBar.setObjectName(_fromUtf8("progressBar"))
self.verticalLayout.addWidget(self.progressBar)
self.tabWidget.addTab(self.tab, _fromUtf8(""))
self.tab_2 = QtGui.QWidget()
self.tab_2.setObjectName(_fromUtf8("tab_2"))
self.verticalLayout_6 = QtGui.QVBoxLayout(self.tab_2)
self.verticalLayout_6.setObjectName(_fromUtf8("verticalLayout_6"))
self.verticalLayout_5 = QtGui.QVBoxLayout()
self.verticalLayout_5.setSpacing(6)
self.verticalLayout_5.setContentsMargins(-1, 0, -1, 0)
self.verticalLayout_5.setObjectName(_fromUtf8("verticalLayout_5"))
self.horizontalLayout_13 = QtGui.QHBoxLayout()
self.horizontalLayout_13.setContentsMargins(-1, 0, -1, 0)
self.horizontalLayout_13.setObjectName(_fromUtf8("horizontalLayout_13"))
self.checkBox = QtGui.QCheckBox(self.tab_2)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkBox.sizePolicy().hasHeightForWidth())
self.checkBox.setSizePolicy(sizePolicy)
self.checkBox.setFocusPolicy(QtCore.Qt.StrongFocus)
self.checkBox.setObjectName(_fromUtf8("checkBox"))
self.horizontalLayout_13.addWidget(self.checkBox)
spacerItem13 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_13.addItem(spacerItem13)
self.verticalLayout_5.addLayout(self.horizontalLayout_13)
self.horizontalLayout_9 = QtGui.QHBoxLayout()
self.horizontalLayout_9.setContentsMargins(-1, 0, -1, 0)
self.horizontalLayout_9.setObjectName(_fromUtf8("horizontalLayout_9"))
self.spinBox_4 = QtGui.QSpinBox(self.tab_2)
self.spinBox_4.setFrame(True)
self.spinBox_4.setButtonSymbols(QtGui.QAbstractSpinBox.UpDownArrows)
self.spinBox_4.setMinimum(100)
self.spinBox_4.setMaximum(250)
self.spinBox_4.setSingleStep(10)
self.spinBox_4.setProperty("value", 150)
self.spinBox_4.setObjectName(_fromUtf8("spinBox_4"))
self.horizontalLayout_9.addWidget(self.spinBox_4)
self.label_6 = QtGui.QLabel(self.tab_2)
self.label_6.setObjectName(_fromUtf8("label_6"))
self.horizontalLayout_9.addWidget(self.label_6)
spacerItem14 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_9.addItem(spacerItem14)
self.verticalLayout_5.addLayout(self.horizontalLayout_9)
self.horizontalLayout_10 = QtGui.QHBoxLayout()
self.horizontalLayout_10.setContentsMargins(-1, 0, -1, 0)
self.horizontalLayout_10.setObjectName(_fromUtf8("horizontalLayout_10"))
self.pushButton_10 = QtGui.QPushButton(self.tab_2)
self.pushButton_10.setFocusPolicy(QtCore.Qt.StrongFocus)
self.pushButton_10.setObjectName(_fromUtf8("pushButton_10"))
self.horizontalLayout_10.addWidget(self.pushButton_10)
self.label_7 = QtGui.QLabel(self.tab_2)
self.label_7.setObjectName(_fromUtf8("label_7"))
self.horizontalLayout_10.addWidget(self.label_7)
spacerItem15 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_10.addItem(spacerItem15)
self.verticalLayout_5.addLayout(self.horizontalLayout_10)
self.horizontalLayout_11 = QtGui.QHBoxLayout()
self.horizontalLayout_11.setSpacing(0)
self.horizontalLayout_11.setContentsMargins(-1, 0, -1, 0)
self.horizontalLayout_11.setObjectName(_fromUtf8("horizontalLayout_11"))
self.pushButton_13 = QtGui.QPushButton(self.tab_2)
self.pushButton_13.setFocusPolicy(QtCore.Qt.StrongFocus)
self.pushButton_13.setObjectName(_fromUtf8("pushButton_13"))
self.horizontalLayout_11.addWidget(self.pushButton_13)
spacerItem16 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_11.addItem(spacerItem16)
self.verticalLayout_5.addLayout(self.horizontalLayout_11)
self.verticalLayout_6.addLayout(self.verticalLayout_5)
spacerItem17 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout_6.addItem(spacerItem17)
self.tabWidget.addTab(self.tab_2, _fromUtf8(""))
self.horizontalLayout.addWidget(self.tabWidget)
self.gridLayout.addLayout(self.horizontalLayout, 0, 0, 1, 1)
self.verticalLayout_2.addLayout(self.gridLayout)
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
self.tabWidget.setCurrentIndex(0)
self.comboBox_5.setCurrentIndex(0)
self.comboBox_7.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "Reklaminiai Parduotuvių Lankstinukai", None))
self.comboBox_2.setItemText(0, _translate("MainWindow", "Maxima", None))
self.comboBox_3.setItemText(0, _translate("MainWindow", "Norfa", None))
self.comboBox_4.setItemText(0, _translate("MainWindow", "Iki", None))
self.comboBox_6.setItemText(0, _translate("MainWindow", "Rimi", None))
self.comboBox_5.setItemText(0, _translate("MainWindow", "Aibė", None))
self.comboBox_7.setItemText(0, _translate("MainWindow", "FRESH MARKET", None))
self.comboBox_10.setItemText(0, _translate("MainWindow", "PROMO CashCarry", None))
self.comboBox_11.setItemText(0, _translate("MainWindow", "PRISMA", None))
self.comboBox_12.setItemText(0, _translate("MainWindow", "EUROKOS", None))
self.comboBox_13.setItemText(0, _translate("MainWindow", "Drogas", None))
self.comboBox_14.setItemText(0, _translate("MainWindow", "ERMITAŽAS", None))
self.comboBox_8.setItemText(0, _translate("MainWindow", "Senukai", None))
self.comboBox_9.setItemText(0, _translate("MainWindow", "Moki*Veži", None))
self.label_8.setText(_translate("MainWindow", "TextLabel", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.pdftab), _translate("MainWindow", "Lankstinukai", None))
self.Intbuttonmaxima.setText(_translate("MainWindow", "Maxima", None))
self.Intbuttonnorfa.setText(_translate("MainWindow", "Norfa", None))
self.Intbuttoniki.setText(_translate("MainWindow", "Iki", None))
self.Intbuttonrimi.setText(_translate("MainWindow", "Rimi", None))
self.intbuttonaibe.setText(_translate("MainWindow", "Aibė", None))
self.intbuttonFRESH_MARKET.setText(_translate("MainWindow", "FRESH MARKET", None))
self.intbuttonPROMO.setText(_translate("MainWindow", "PROMO CashCarry", None))
self.intbuttonPRISMA.setText(_translate("MainWindow", "PRISMA", None))
self.intbuttonEUROKOS.setText(_translate("MainWindow", "EUROKOS", None))
self.intbuttonDrogas.setText(_translate("MainWindow", "Drogas", None))
self.intbuttonERMITAZAS.setText(_translate("MainWindow", "ERMITAŽAS", None))
self.intbuttonSenukai.setText(_translate("MainWindow", "Senukai", None))
self.intbuttonMoki_Vezi.setText(_translate("MainWindow", "Moki*Veži", None))
self.intbuttonJysk.setText(_translate("MainWindow", "Jysk", None))
self.pushButton_12.setText(_translate("MainWindow", "Į adresyną", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.Internettab), _translate("MainWindow", "Internetas", None))
self.label_3.setText(_translate("MainWindow", "<html><head/><body><p align=\"justify\"><span style=\" font-weight:600;\">Lankstinukų atnaujinimas</span><br/></p></body></html>", None))
self.checkboxmaxima.setText(_translate("MainWindow", "Maxima", None))
self.checkBoxnorfa.setText(_translate("MainWindow", "Norfa", None))
self.checkBoxiki.setText(_translate("MainWindow", "Iki", None))
self.checkBoxrimi.setText(_translate("MainWindow", "Rimi", None))
self.checkboxAibe.setText(_translate("MainWindow", "Aibė", None))
self.checkboxFRESH_MARKET.setText(_translate("MainWindow", "FRESH MARKET", None))
self.checkboxPROMO.setText(_translate("MainWindow", "PROMO CashCarry", None))
self.checkboxPRISMA.setText(_translate("MainWindow", "PRISMA", None))
self.checkboxEUROKOS.setText(_translate("MainWindow", "EUROKOS", None))
self.checkboxDrogas.setText(_translate("MainWindow", "Drogas", None))
self.checkboxERMITAZAS.setText(_translate("MainWindow", "ERMITAŽAS", None))
self.checkboxSenukai.setText(_translate("MainWindow", "Senukai", None))
self.checkboxMoki_Vezi.setText(_translate("MainWindow", "Moki*Veži", None))
self.pushButtondownloadpdf.setText(_translate("MainWindow", "Tikrinti ir atsiųsti dabar", None))
self.checkBox_4.setText(_translate("MainWindow", "Automatiškai tikrinti ar yra naujų lankstinukų kas ", None))
self.label_5.setText(_translate("MainWindow", " dienų ", None))
self.checkBox_3.setText(_translate("MainWindow", "Automatiškai trinti senus lankstinukus po ", None))
self.label_2.setText(_translate("MainWindow", " dienų ", None))
self.pushButton_8.setText(_translate("MainWindow", "Trinti dabar", None))
self.label.setText(_translate("MainWindow", "<html><head/><body><p align=\"justify\"><span style=\" font-weight:600;\">Programos atnaujinimas</span><br/></p></body></html>", None))
self.checkBox_2.setText(_translate("MainWindow", "Automatiškai tikrinti įjungiant programą kas ", None))
self.label_4.setText(_translate("MainWindow", " dienų ", None))
self.pushButton_7.setText(_translate("MainWindow", "Tikrinti ir atsiųsti dabar", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), _translate("MainWindow", "Naujinimas", None))
self.checkBox.setText(_translate("MainWindow", "Naudoti pdf.js. Lėtas ir kolkas kreivokai atvaizduoja su standartiniu webkit\'u.", None))
self.label_6.setText(_translate("MainWindow", "PPI paveikslėlių kūrimui iš lankstinukų. 1920x* ekranui reikėtų 200.", None))
self.pushButton_10.setText(_translate("MainWindow", "Ištrinti paveikslėlius", None))
self.label_7.setText(_translate("MainWindow", "Nespausk. Rimtai ;)", None))
self.pushButton_13.setText(_translate("MainWindow", "Pagalba", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _translate("MainWindow", "Nustatymai", None))
| agpl-3.0 | -3,426,794,644,451,782,700 | 60.519692 | 192 | 0.722246 | false | 3.714589 | false | false | false |
lord63/wonderful_bing | tests/test_computer.py | 1 | 2119 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from os import path
import mock
import pytest
from wonderful_bing.wonderful_bing import Computer
@pytest.fixture
def computer():
computer = Computer()
return computer
def test_computer(computer):
gnome_based = ("DISPLAY=:0 GSETTINGS_BACKEND=dconf "
"/usr/bin/gsettings set org.gnome.desktop.background "
"picture-uri file://{0}")
mate_based = ("DISPLAY=:0 GSETTINGS_BACKEND=dconf "
"/usr/bin/gsettings set org.mate.background "
"picture-filename '{0}'")
xfce_based = ("DISPLAY=:0 xfconf-query -c xfce4-desktop "
"-p /backdrop/screen0/monitor0/image-path -s {0}")
assert computer._get_command('gnome') == gnome_based
assert computer._get_command('gnome2') == gnome_based
assert computer._get_command('cinnamon') == gnome_based
assert computer._get_command('mate') == mate_based
assert computer._get_command('xfce4') == xfce_based
assert computer._get_command('blablabla') is None
def test_set_wallpaper_with_unsupported_environment(computer):
with pytest.raises(SystemExit):
computer.set_wallpaper('blablabla', 'tmp/blabla.jpg')
def test_set_wallpaper(computer):
with mock.patch('wonderful_bing.wonderful_bing.subprocess') as subprocess:
subprocess.Popen.return_value.returncode = 0
computer.set_wallpaper('gnome', '/tmp/blabla.jpg')
command = computer._get_command('gnome').format('/tmp/blabla.jpg')
subprocess.Popen.assert_called_once_with(command, shell=True)
def test_show_notify(computer):
with mock.patch('wonderful_bing.wonderful_bing.subprocess') as subprocess:
computer.show_notify('Hello, world')
notify_icon = path.join(
path.dirname(path.dirname(path.realpath(__file__))),
'wonderful_bing/img/icon.png')
subprocess.Popen.assert_called_once_with(
["notify-send", "-a", "wonderful_bing", "-i",
notify_icon, "Today's Picture Story", "Hello, world"])
| mit | 8,599,398,593,725,011,000 | 34.915254 | 78 | 0.652666 | false | 3.45677 | true | false | false |
kste/aeshash | rebound/rebound.py | 1 | 4025 | '''
Created on Jan 9, 2014
@author: Stefan Koelbl
'''
from random import sample
from gost.gost import GOST
# Differential distribution table of the S-box: DDT[in_diff][out_diff]
# counts the input pairs mapping in_diff to out_diff.  Filled by computeDDT().
DDT = []
# SBOX_MATCH[in_diff][out_diff] holds the concrete value pairs [a, b] with
# a ^ b = in_diff and sbox[a] ^ sbox[b] = out_diff.  Filled by computeSBOX_MATCH().
SBOX_MATCH = []
# Rows of L applied to single-byte states (x 0 ... 0); filled by computeLinearStepList().
LinearStepList = []
# Rows of L inverse for single-byte states; filled by computeInverseLinearStepList().
InverseLinearStepList = []
# Scratch lists for difference propagation (populated elsewhere in this module).
possibleOutputDifferences = []
possibleInputDifferences = []
def propagateDifferencesThroughSout(state):
    """
    Return a GOST state holding one possible output difference after the
    S-box layer for the input difference given in *state*.

    For every byte position a random compatible output difference is
    drawn from the candidate set supplied by getValidDiffsForOutputDiff.
    """
    propagated = GOST()
    for row in range(8):
        for col in range(8):
            candidates = getValidDiffsForOutputDiff(state.getValue(row, col))
            propagated.setValue(row, col, sample(candidates, 1)[0])
    return propagated
def propagateDifferencesThroughSin(state):
    """
    Returns a state containing a possible input difference after applying
    the inverse S-Box for the given output difference.

    Each byte of the 8x8 state is replaced by one input difference chosen
    uniformly at random among those compatible with the byte's output
    difference according to the DDT.
    """
    result = GOST()
    for x in range(8):
        for y in range(8):
            # BUG FIX: going backwards through the S-Box maps an *output*
            # difference to compatible *input* differences, which is what
            # getValidDiffsForOutputDiff provides (the original code used
            # getValidDiffsForInputDiff, i.e. the forward direction).
            candidates = getValidDiffsForOutputDiff(state.getValue(x, y))
            result.setValue(x, y, sample(candidates, 1)[0])
    return result
def computeLinearStepList():
    """
    Populate the module-level LinearStepList with every possible row
    (x 0 0 0 0 0 0 0) * L = (y0 y1 y2 y3 y4 y5 y6 y7)
    for x in 1..255.
    """
    global LinearStepList
    cipher = GOST()
    for byte in range(1, 256):
        cipher.setValue(0, 0, byte)
        LinearStepList.append(cipher.L().getRow(0))
def computeInverseLinearStepList():
    """
    Populate the module-level InverseLinearStepList with every possible row
    (x 0 0 0 0 0 0 0) * Linverse = (y0 y1 y2 y3 y4 y5 y6 y7)
    for x in 1..255.
    """
    global InverseLinearStepList
    cipher = GOST()
    for byte in range(1, 256):
        cipher.setValue(0, 0, byte)
        InverseLinearStepList.append(cipher.Linverse().getRow(0))
def computeDDT(sbox):
    """
    Compute the differential distribution table (DDT) for a given S-Box.

    DDT[din][dout] counts the input pairs (a, b) with a ^ b == din and
    sbox[a] ^ sbox[b] == dout.  The table is stored in the module-level
    DDT (kept for backwards compatibility with existing callers) and is
    also returned, so the function can be used functionally and tested.
    """
    global DDT
    size = len(sbox)
    DDT = [[0] * size for _ in range(size)]
    for a in range(size):
        for b in range(size):
            DDT[a ^ b][sbox[a] ^ sbox[b]] += 1
    return DDT
def computeSBOX_MATCH(sbox):
    """
    Compute the valid pairs for each input/output difference.

    SBOX_MATCH[din][dout] is the list of value pairs [a, b] satisfying
    a ^ b == din and sbox[a] ^ sbox[b] == dout.  The table is stored in
    the module-level SBOX_MATCH (kept for backwards compatibility) and
    also returned for functional use and testing.
    """
    global SBOX_MATCH
    size = len(sbox)
    SBOX_MATCH = [[[] for _ in range(size)] for _ in range(size)]
    for a in range(size):
        for b in range(size):
            SBOX_MATCH[a ^ b][sbox[a] ^ sbox[b]].append([a, b])
    return SBOX_MATCH
def getValidBytePairsForOutputDiff(outputDiff):
    """
    Get all possible pairs (a, b) such that:
        S(a) xor S(b) = outputDiff

    Returns, for every input difference that can lead to *outputDiff*,
    the (non-empty) pair list stored in SBOX_MATCH.
    """
    return [SBOX_MATCH[din][outputDiff]
            for din in range(len(SBOX_MATCH))
            if len(SBOX_MATCH[din][outputDiff]) > 0]
def getValidBytePairsForInputDiff(inputDiff):
    """
    Get all possible pairs (a, b) such that:
        Sinverse(a) xor Sinverse(b) = inputDiff

    Returns, for every output difference reachable from *inputDiff*,
    the (non-empty) pair list stored in SBOX_MATCH.
    """
    return [SBOX_MATCH[inputDiff][dout]
            for dout in range(len(SBOX_MATCH))
            if len(SBOX_MATCH[inputDiff][dout]) > 0]
def getValidDiffsForInputDiff(inputDiff):
    """
    Get all possible output differences for a given input difference.

    The lookup table is derived lazily from the DDT on first use and
    cached in the module-level possibleOutputDifferences list.
    """
    global possibleOutputDifferences
    if not possibleOutputDifferences:
        # Build the table once: one set of reachable output diffs per input diff.
        possibleOutputDifferences = [
            set(dout for dout in range(256) if DDT[din][dout] > 0)
            for din in range(256)
        ]
    return possibleOutputDifferences[inputDiff]
def getValidDiffsForOutputDiff(outputDiff):
    """
    Get all possible input differences for a given output difference.

    The lookup table is derived lazily from the DDT on first use and
    cached in the module-level possibleInputDifferences list.
    """
    global possibleInputDifferences
    if not possibleInputDifferences:
        # Build the table once: one set of compatible input diffs per output diff.
        possibleInputDifferences = [
            set(din for din in range(256) if DDT[din][dout] > 0)
            for dout in range(256)
        ]
    return possibleInputDifferences[outputDiff]
lnsp/tea | runtime/parser.py | 1 | 23001 | """Parse a tokenized expression into an AST."""
import codecs
from runtime import ast, lexer, env, lib, flags
class ParseException(Exception):
    """Base class for every error raised while parsing a token stream."""

    def __init__(self, msg):
        super().__init__("ParseException: " + msg)
class MissingOperand(ParseException):
    """Raised when an operator has fewer operands on the stack than it needs."""
    def __init__(self, op):
        super().__init__("%s is missing operands" % op)
class UnknownOperator(ParseException):
    """Raised for an operator token the parser does not recognise."""
    def __init__(self, op):
        super().__init__("Unknown operator %s" % op)
class BadStatement(ParseException):
    """A statement exception."""
    def __init__(self, msg="Bad statement without semicolon"):
        super().__init__(msg)
# NOTE(review): this class shadows the builtin ``NotImplemented`` singleton
# inside this module; ``raise NotImplemented()`` below therefore raises this
# parser exception, not a TypeError.  Renaming would change the module's
# public API, so it is only flagged here.
class NotImplemented(ParseException):
    """A parse exception."""
    def __init__(self, msg="Functionality not implemented"):
        super().__init__(msg)
class InvalidStatement(ParseException):
    """Raised when a token cannot start or continue a statement."""
    def __init__(self, msg):
        super().__init__("Invalid statement: Unexpected %s" % str(msg))
class InvalidDeclaration(ParseException):
    """Raised for a malformed ``var`` declaration."""
    def __init__(self, msg):
        super().__init__("Invalid declaration: Unexpected %s" % str(msg))
class InvalidDefinition(ParseException):
    """Raised for a malformed function definition."""
    def __init__(self, msg):
        super().__init__("Invalid definition: Unexpected %s" % str(msg))
class InvalidAssignment(ParseException):
    """Raised for a malformed assignment statement."""
    def __init__(self, msg="Invalid assignment"):
        super().__init__(msg)
class InvalidBlock(ParseException):
    """Raised when `{` / `}` block delimiters are missing or unbalanced."""
    def __init__(self, msg="Missing block borders"):
        super().__init__(msg)
class InvalidExpression(ParseException):
    """Raised when an expression is empty or cannot be reduced to one node."""
    def __init__(self, msg="Invalid expression"):
        super().__init__(msg)
class InvalidCondition(ParseException):
    """Raised for a malformed ``if``/loop condition head."""
    def __init__(self, msg="Invalid condition"):
        super().__init__(msg)
class InvalidLoop(ParseException):
    """Raised for a malformed ``while``/``for`` loop."""
    def __init__(self, msg):
        super().__init__("Invalid loop: Unexpected %s" % str(msg))
def is_assignment(token):
    """Return True when *token* is a plain or augmented assignment operator."""
    if token is None or token.kind is not lexer.OPERATOR:
        return False
    return token.value in ("=", "+=", "-=", "*=", "/=", "%=", "^=")
def find_matching_block(stream, start):
    """Return the index of the RBLOCK closing an already-open LBLOCK.

    Scanning begins at *start* with one block considered open.  Returns -1
    when the stream ends before the block is closed.
    """
    depth = 1
    for index in range(start, len(stream)):
        kind = stream[index].kind
        if kind == lexer.LBLOCK:
            depth += 1
        elif kind == lexer.RBLOCK:
            depth -= 1
            if depth == 0:
                return index
    return -1
def find_matching_prt(stream, start):
    """Return the index of the RPRT closing an already-open LPRT, or -1.

    Scanning begins at *start* with one parenthesis considered open.
    """
    depth = 1
    for index in range(start, len(stream)):
        if flags.debug:
            print("scanned", str(stream[index]), ":", depth)
        kind = stream[index].kind
        if kind == lexer.LPRT:
            depth += 1
        elif kind == lexer.RPRT:
            depth -= 1
        if depth == 0:
            return index
    return -1
def get_arg_count(operator, last_token):
    """Return how many operands *operator* consumes: 1 (unary) or 2 (binary).

    "+" and "-" are unary when the preceding token cannot terminate an
    operand (start of expression, another operator, an opening paren, ...).
    """
    if operator in ("!",):
        return 1
    if operator in ("+", "-") and (last_token is None or last_token.kind not in
                                   [lexer.NUMBER, lexer.IDENTIFIER, lexer.STRING, lexer.RPRT]):
        if flags.debug:
            print("unary operator because of", last_token)
        return 1
    if flags.debug:
        print("binary because of", last_token)
    return 2
def is_left_associative(operator, last_token):
    """Return False for right-associative operators (unary +/-, "!", "^")."""
    if operator in ("!", "^"):
        return False
    if operator in ("+", "-") and (last_token is None or last_token.kind not in
                                   [lexer.NUMBER, lexer.IDENTIFIER, lexer.STRING, lexer.RPRT]):
        if flags.debug:
            print("right associative because of", last_token)
        return False
    if flags.debug:
        print("left associative because of", last_token)
    return True
def get_precedence(operator, last_token):
    """Return the binding strength of *operator*; higher binds tighter.

    Unary sign (a "+"/"-" not preceded by an operand-terminating token)
    binds like "!" at level 7.  Unknown operators yield 0.
    """
    if operator in ("+", "-") and (last_token is None or last_token.kind not in
                                   [lexer.NUMBER, lexer.IDENTIFIER, lexer.STRING, lexer.RPRT]):
        return 7  # unary sign
    table = {
        "!": 7,
        "^": 6,
        "*": 5, "/": 5,
        "+": 4, "-": 4, ":": 4,
        "%": 3,
        "<": 2, ">": 2, "<=": 2, ">=": 2, "!=": 2, "==": 2,
        "&&": 1, "||": 1, "^|": 1,
    }
    return table.get(operator, 0)
def generate_expression(stream):
    """Parse one expression with a shunting-yard algorithm.

    Returns (root_node, tokens_consumed).  Parsing stops early at a
    STATEMENT token (semicolon); otherwise the whole stream is consumed.
    Raises InvalidExpression / MissingOperand / ParseException on errors.
    """
    if flags.debug:
        print("Starting generate expression")
    operand_stack = []
    operator_stack = []
    # NOTE: ``max`` shadows the builtin within this function.
    max = len(stream) - 1
    last_token = None
    token = None
    def pop_off_operator():
        # Pop one operator and attach as many operands as it was tagged with.
        if len(operator_stack) < 1:
            raise ParseException("Empty operator stack, could not pop off operator")
        operator = operator_stack.pop()
        if flags.debug:
            print("popping of", operator)
        arg_count = operator.tag("arg_count")
        if len(operand_stack) < arg_count:
            raise MissingOperand(operator.symbol)
        for j in range(arg_count):
            operator.add_front(operand_stack.pop())
        operand_stack.append(operator)
    for i in range(max + 1):
        last_token = token
        token = stream[i]
        if flags.debug:
            print(">>> Parsing next token:", token)
            print("Operands: ", ', '.join(str(e) for e in operand_stack))
            print("Operators:", ', '.join(str(e) for e in operator_stack))
        if token.kind == lexer.NUMBER:
            value = None
            # A '.' marks a float literal; note both branches convert via
            # float(), so integer literals are stored as float data too.
            if '.' in token.value:
                value = env.Value(lib.FLOAT, data=float(token.value))
            else:
                value = env.Value(lib.INTEGER, data=float(token.value))
            operand_stack.append(ast.Literal(value))
        elif token.kind == lexer.STRING:
            # Strip surrounding quotes and resolve escape sequences (\n, \t, ...).
            stripped = token.value.strip("\"")
            decoded = codecs.decode(stripped, "unicode_escape")
            value = env.Value(lib.STRING, data=decoded)
            operand_stack.append(ast.Literal(value))
        elif token.kind == lexer.SEPARATOR:
            # Argument separator: reduce until the enclosing "(" marker.
            while len(operator_stack) > 0 and operator_stack[-1] != "(":
                pop_off_operator()
        elif token.kind == lexer.IDENTIFIER:
            # An identifier directly followed by "(" is a function call.
            if i < max and stream[i+1].kind == lexer.LPRT:
                operator_stack.append(ast.Call(token.value))
            else:
                # Keyword literals, otherwise a plain identifier reference.
                if token.value == "false":
                    operand_stack.append(ast.Literal(env.Value(lib.BOOLEAN, data=False)))
                elif token.value == "true":
                    operand_stack.append(ast.Literal(env.Value(lib.BOOLEAN, data=True)))
                elif token.value == "null":
                    operand_stack.append(ast.Literal(env.Value(lib.NULL)))
                else:
                    operand_stack.append(ast.Identifier(token.value))
        elif token.kind == lexer.OPERATOR:
            new_operator = ast.Operation(token.value)
            # Precedence/arity/associativity depend on the previous token
            # (to distinguish unary from binary +/-).
            prec = get_precedence(token.value, last_token)
            arg_count = get_arg_count(token.value, last_token)
            left_associative = is_left_associative(token.value, last_token)
            new_operator.tag("precedence", prec)
            new_operator.tag("arg_count", arg_count)
            new_operator.tag("left_associative", left_associative)
            if flags.debug:
                print("adding operator", new_operator.symbol, "to", len(operator_stack))
            # Reduce stronger (or equal, for left-associative) operators first.
            while len(operator_stack) > 0 and (type(operator_stack[-1]) is ast.Operation):
                other = operator_stack[-1]
                other_prec = operator_stack[-1].tag("precedence")
                if flags.debug:
                    print("comparing precedence of ", new_operator.symbol, prec, "to", other.symbol, other_prec)
                if left_associative:
                    if prec > other_prec:
                        break
                else:
                    if prec >= other_prec:
                        break
                pop_off_operator()
            operator_stack.append(new_operator)
            if flags.debug:
                print("pushed operator on stack")
        elif token.kind == lexer.LPRT:
            # Push "(" markers on both stacks; the operand-side marker lets a
            # closing paren collect call arguments.
            operand_stack.append(token.value)
            operator_stack.append(token.value)
        elif token.kind == lexer.RPRT:
            while len(operator_stack) > 0 and operator_stack[-1] != "(":
                pop_off_operator()
            if len(operator_stack) < 1:
                raise ParseException("Mismatched parentheses")
            operator_stack.pop()
            if len(operator_stack) > 0 and type(operator_stack[-1]) is ast.Call:
                # Closing a call: everything above the "(" marker is an argument.
                function = operator_stack.pop()
                while len(operand_stack) > 0 and operand_stack[-1] != "(":
                    function.add_front(operand_stack.pop())
                operand_stack.pop()
                operand_stack.append(function)
            else:
                # Plain grouping: just drop the "(" marker from the operand stack.
                j = len(operand_stack) - 1
                while j >= 0 and operand_stack[j] != "(":
                    j -= 1
                del operand_stack[j]
        elif token.kind == lexer.STATEMENT:
            # Semicolon terminates the expression: reduce everything and return.
            while len(operator_stack) > 0:
                pop_off_operator()
            if len(operand_stack) > 1:
                raise InvalidExpression()
            if len(operand_stack) != 1:
                raise InvalidExpression("Empty expression")
            return operand_stack[0], i
        # NOTE(review): redundant with the assignment at the loop top, but
        # kept as-is; removing it would not change behavior.
        last_token = token
    while len(operator_stack) > 0:
        pop_off_operator()
    if flags.debug:
        print("Operands: ", ', '.join(str(e) for e in operand_stack))
        print("Operators:", ', '.join(str(e) for e in operator_stack))
    if len(operand_stack) != 1:
        raise InvalidExpression("Empty expression")
    if flags.debug:
        print("Parsed expression with length %d" % (max + 1))
    return operand_stack[0], max + 1
def generate_declaration(stream):
    """Parse a ``var`` declaration (``var a, b : type = expr;``).

    Returns (Sequence_node, tokens_consumed).  The sequence contains one
    Declaration per declared name and, when an initializer is present, a
    chain of Assignment nodes wrapping the initializer expression.
    Raises InvalidDeclaration / ParseException on malformed input.
    """
    if flags.debug:
        print("Starting generating declaration")
    # The declaration ends at the first semicolon (STATEMENT token).
    end = len(stream)
    for j in range(len(stream)):
        if stream[j].kind is lexer.STATEMENT:
            end = j
            break
    if end < 3:
        raise ParseException("Declaration too short")
    if not (stream[0].kind is lexer.IDENTIFIER and stream[0].value == "var"):
        raise InvalidDeclaration(stream[0])
    if stream[1].kind != lexer.IDENTIFIER:
        raise InvalidDeclaration(stream[1])
    declared_names = []
    sequ = ast.Sequence()
    expr_begin = 2
    # ignore_type stays True until an explicit ":" type annotation is seen.
    ignore_type = True
    while expr_begin < end and ((stream[expr_begin].kind is lexer.SEPARATOR) or
                                ((stream[expr_begin].kind is lexer.OPERATOR) and
                                 (stream[expr_begin].value == ":" or
                                  (stream[expr_begin].value == "=" and ignore_type)))):
        if (stream[expr_begin].kind is lexer.OPERATOR) and stream[expr_begin].value == ":":
            ignore_type = False
        if expr_begin > 2 and stream[expr_begin - 2].kind != lexer.SEPARATOR:
            raise InvalidDeclaration(stream[expr_begin - 2])
        if stream[expr_begin - 1].kind is not lexer.IDENTIFIER:
            raise InvalidDeclaration(stream[expr_begin - 1])
        declared_names.append(stream[expr_begin - 1].value)
        expr_begin += 2
    if not ignore_type and stream[expr_begin - 1].kind is not lexer.IDENTIFIER:
        raise InvalidDeclaration(stream[expr_begin - 1])
    # Untyped declarations default to the "null" datatype.
    datatype = "null"
    if not ignore_type:
        datatype = stream[expr_begin - 1].value
    else:
        expr_begin -= 2
    expr = None
    if expr_begin < end and is_assignment(stream[expr_begin]):
        expr, _ = generate_expression(stream[expr_begin + 1:])
    for name in declared_names:
        decl = ast.Declaration(name, datatype)
        sequ.add(decl)
        # Wrap the initializer in one Assignment per declared name, so
        # ``var a, b = e`` assigns e to b, then that result to a.
        if expr is not None:
            assgn = ast.Assignment(name, ignore_type)
            assgn.add(expr)
            expr = assgn
    if expr is not None:
        sequ.add(expr)
    return sequ, end - 1
def generate_assignment(stream):
    """Parse ``name = expr`` or an augmented assignment (``+=`` etc.).

    Returns (Assignment_node, tokens_consumed).  Augmented assignments are
    desugared into ``name = name <op> expr``.
    Raises InvalidAssignment when the leading tokens do not match.
    """
    if flags.debug:
        print("Starting generating assigment")
    if len(stream) < 3:
        raise InvalidAssignment()
    name_token, equ_token = stream[0], stream[1]
    if name_token.kind != lexer.IDENTIFIER or not is_assignment(equ_token):
        raise InvalidAssignment()
    expr, offset = generate_expression(stream[2:])
    if flags.debug:
        print("Expression has offset %d" % offset)
    # Multi-character operator ("+=", "-=", ...): rebuild as name <op> expr.
    if len(equ_token.value) != 1:
        operation = ast.Operation(equ_token.value[0])
        operation.add(ast.Identifier(name_token.value))
        operation.add(expr)
        expr = operation
    assgn = ast.Assignment(name_token.value)
    assgn.add(expr)
    if flags.debug:
        print("Assignment has offset %d" % (1 + offset))
    return assgn, 1 + offset
def generate_function(stream):
    """Parse a function definition: name, typed argument list, body block.

    *stream* starts at the function-name token (the ``func`` keyword has
    already been consumed by generate_sequence).  Arguments are expected as
    ``name : type`` groups separated by one token (4 tokens per group).
    Returns (Definition_node, tokens_consumed).
    Raises InvalidDefinition on malformed input.
    """
    if flags.debug:
        print("Starting generating function definition")
    head_name = stream[0]
    if head_name.kind is not lexer.IDENTIFIER:
        raise InvalidDefinition(head_name)
    fnc_name = head_name.value
    head_start = stream[1]
    if head_start.kind != lexer.LPRT:
        raise InvalidDefinition(head_start)
    head_end_index = find_matching_prt(stream, 2)
    arguments = []
    arg_index = 2
    while arg_index < head_end_index:
        if stream[arg_index].kind is not lexer.IDENTIFIER:
            raise InvalidDefinition(stream[arg_index])
        arg_name = stream[arg_index].value
        if arg_index + 3 >= len(stream):
            raise InvalidDefinition(stream[arg_index+1])
        # BUG FIX: the next two raises previously referenced the misspelled
        # name ``InvalidDefinitiom`` and would have crashed with a NameError
        # instead of reporting the actual parse error.
        if (stream[arg_index+1].kind is not lexer.OPERATOR) or stream[arg_index+1].value != ":":
            raise InvalidDefinition(stream[arg_index+1])
        if stream[arg_index+2].kind is not lexer.IDENTIFIER:
            raise InvalidDefinition(stream[arg_index+2])
        arg_type = stream[arg_index+2].value
        arguments.append(env.Value(arg_type, None, arg_name))
        arg_index += 4
    if flags.debug:
        print("Adding arguments:", ', '.join(str(e) for e in arguments))
    body_start_index = head_end_index + 1
    body_start = stream[body_start_index]
    if body_start.kind is not lexer.LBLOCK:
        raise InvalidDefinition(body_start)
    body, body_len = generate_sequence(stream[body_start_index+1:])
    defi_node = ast.Definition(fnc_name, arguments)
    defi_node.add(body)
    return defi_node, 3 + head_end_index + body_len
def generate_if(stream):
    """Parse ``if (cond) { body }`` with optional ``else if`` / ``else``.

    *stream* starts at the ``if`` keyword.  Returns (Branch_node,
    tokens_consumed); else-if chains are handled by recursing into
    generate_if.  Raises InvalidCondition / InvalidBlock on bad input.
    """
    if flags.debug:
        print("Starting generating if statement")
    cond_head = stream[0]
    if not (cond_head.kind is lexer.IDENTIFIER and cond_head.value == "if"):
        raise InvalidCondition()
    cond_start_index = 1
    cond_start = stream[cond_start_index]
    if cond_start.kind != lexer.LPRT:
        raise InvalidCondition()
    cond_end_index = find_matching_prt(stream, cond_start_index + 1)
    if cond_end_index == -1:
        raise InvalidCondition()
    cond_block = stream[cond_start_index+1:cond_end_index]
    if flags.debug:
        print("if-condition: " + ' '.join(str(e) for e in cond_block))
    body_start_index = cond_end_index + 1
    body_start = stream[body_start_index]
    if body_start.kind != lexer.LBLOCK:
        raise InvalidBlock()
    body_end_index = find_matching_block(stream, body_start_index)
    body_block = stream[body_start_index+1:body_end_index]
    condition, cond_len = generate_expression(cond_block)
    body, body_len = generate_sequence(body_block)
    # substitute marks the body as a scope that replaces (not nests in)
    # the current one -- TODO confirm against ast.Sequence semantics.
    body.substitute = True
    branch_node = ast.Branch()
    cond_node = ast.Conditional()
    cond_node.add(condition)
    cond_node.add(body)
    branch_node.add(cond_node)
    # if ( .... ) { .... }
    # 0 1 cond 2 3 body 4
    offset = 4 + cond_len + body_len
    if offset + 1 >= len(stream) or not (stream[offset+1].kind is lexer.IDENTIFIER and
                                         stream[offset+1].value == "else"):
        return branch_node, offset
    if flags.debug:
        print("Possible else (if) at", str(stream[offset+1]))
    # else if? (offset+2 == 'if')
    if stream[offset+2].kind is lexer.IDENTIFIER and stream[offset+2].value == "if":
        if flags.debug:
            print("Parsing else-if at token", offset + 2)
        elif_node, elif_len = generate_if(stream[offset+2:])
        branch_node.add(elif_node)
        # ...... else .......
        # offset 1 elif_len
        offset += elif_len + 2
    # guaranteed to be else
    else:
        if flags.debug:
            print("Parsing else at token", offset + 2)
        else_body, else_len = generate_sequence(stream[offset+3:])
        else_body.substitute = True
        branch_node.add(else_body)
        # ...... else { ........ }
        # offset 1 2 else_len 3
        offset += else_len + 3
    return branch_node, offset
def generate_for(stream):
    """Parse ``for (init; cond; iter) { body }`` into init + while loop.

    Desugars the for-loop into a Sequence containing the init statement and
    a Loop whose body is the original body followed by the iter statement.
    Returns (Sequence_node, tokens_consumed).
    """
    if flags.debug:
        print("Starting generating for statement")
    for_ident = stream[0]
    if not (for_ident.kind is lexer.IDENTIFIER and for_ident.value == "for"):
        raise InvalidCondition()
    cond_start = 2
    head_start = stream[cond_start - 1]
    if head_start.kind is not lexer.LPRT:
        raise InvalidCondition()
    head_end_index = find_matching_prt(stream, cond_start)
    if head_end_index == -1:
        raise InvalidCondition()
    # find first ;
    init_end_index = cond_start
    for j in range(len(stream)):
        if stream[j].kind is lexer.STATEMENT:
            init_end_index = j
            break
    init_stmt, init_len = generate_sequence(stream[cond_start:init_end_index+1])
    # NOTE(review): the cond/iter slices are positioned by the *lengths*
    # returned from the previous parses; off-by-one behavior here depends on
    # generate_sequence's return convention -- verify with integration tests.
    cond_expr, cond_len = generate_expression(stream[cond_start + init_len:head_end_index])
    iter_stmt, iter_len = generate_sequence(stream[cond_start + init_len + cond_len:head_end_index])
    body_start_index = head_end_index + 1
    body_start = stream[body_start_index]
    if body_start.kind is not lexer.LBLOCK:
        raise InvalidBlock()
    body_end_index = find_matching_block(stream, body_start_index + 1)
    body, body_len = generate_sequence(stream[body_start_index+1:])
    # Loop body executes the original body, then the iteration statement.
    inner_sequ = ast.Sequence()
    inner_sequ.add(body)
    inner_sequ.add(iter_stmt)
    loop = ast.Loop()
    loop.add(cond_expr)
    loop.add(inner_sequ)
    sequ = ast.Sequence(True)
    sequ.add(init_stmt)
    sequ.add(loop)
    return sequ, 4 + init_len + cond_len + iter_len + body_len
def generate_while(stream):
    """Parse ``while (cond) { body }``.

    *stream* starts at the ``while`` keyword.  Returns (Loop_node,
    tokens_consumed).  Raises InvalidLoop / InvalidCondition / InvalidBlock.
    """
    if flags.debug:
        print("Starting generating while statement")
    # Shortest valid form: while ( x ) { } -> 6 tokens.
    if len(stream) < 6:
        raise InvalidLoop("length %d" % len(stream))
    if not (stream[0].kind is lexer.IDENTIFIER and stream[0].value == "while"):
        raise InvalidLoop(stream[0])
    cond_start = stream[1]
    if cond_start.kind != lexer.LPRT:
        raise InvalidCondition()
    cond_end_index = find_matching_prt(stream, 2)
    if cond_end_index == -1:
        raise InvalidCondition()
    body_start_index = cond_end_index+1
    body_start = stream[body_start_index]
    if body_start.kind != lexer.LBLOCK:
        raise InvalidBlock()
    body_end_index = find_matching_block(stream, body_start_index+1)
    condition, cond_len = generate_expression(stream[2:cond_end_index])
    body, offset = generate_sequence(stream[body_start_index+1:])
    body.substitute = True
    loop = ast.Loop()
    loop.add(condition)
    loop.add(body)
    return loop, 4 + cond_len + offset
def generate_sequence(stream):
    """Parse a statement sequence until the stream (or enclosing block) ends.

    Dispatches on the leading token of each statement: keywords (func,
    return, while, if, for, var, ...), assignments, bare expressions and
    nested blocks.  Returns (Sequence_node, tokens_consumed); an RBLOCK
    token ends the sequence early.  Raises InvalidStatement for tokens
    that cannot start a statement.
    """
    if flags.debug:
        print("Starting generating sequence")
        print("Generating on", stream)
    sequence = ast.Sequence()
    # NOTE(review): ``stack`` and ``queue`` are never used in this function.
    stack = []
    queue = []
    # ``max`` shadows the builtin within this function.
    max = len(stream) - 1
    i = 0
    def next():
        # One-token lookahead; shadows the builtin ``next``.
        if i < max:
            return stream[i+1]
        return None
    while i <= max:
        if flags.debug:
            print("Operating on", i, stream[i])
        token = stream[i]
        if token.kind == lexer.IDENTIFIER:
            if token.value == "func":
                func, offset = generate_function(stream[i+1:])
                sequence.add(func)
                i += offset
            elif token.value == "return":
                expr, offset = generate_expression(stream[i+1:])
                return_node = ast.Return()
                return_node.add(expr)
                sequence.add(return_node)
                i += offset
            elif token.value == "continue":
                sequence.add(ast.Continue())
            elif token.value == "break":
                sequence.add(ast.Break())
            elif token.value == "while":
                while_node, offset = generate_while(stream[i:])
                sequence.add(while_node)
                i += offset
            elif token.value == "if":
                if_node, offset = generate_if(stream[i:])
                sequence.add(if_node)
                i += offset
            elif token.value == "for":
                for_node, offset = generate_for(stream[i:])
                sequence.add(for_node)
                i += offset
            elif token.value == "import":
                # Raises the module-local NotImplemented ParseException
                # subclass defined above (not the builtin singleton).
                raise NotImplemented()
            elif token.value == "var":
                decl, offset = generate_declaration(stream[i:])
                sequence.add(decl)
                i += offset
            else:
                # Plain identifier: assignment if followed by an assignment
                # operator, otherwise parsed as an expression statement.
                if i < max and is_assignment(next()):
                    assgn, offset = generate_assignment(stream[i:])
                    sequence.add(assgn)
                    i += offset
                else:
                    expr, offset = generate_expression(stream[i:])
                    sequence.add(expr)
                    i += offset - 1
        elif token.kind in [lexer.NUMBER, lexer.STRING, lexer.OPERATOR, lexer.LPRT]:
            expr, offset = generate_expression(stream[i:])
            sequence.add(expr)
            i += offset
        elif token.kind == lexer.LBLOCK:
            # Nested anonymous block: parse recursively past its contents.
            sequ, offset = generate_sequence(stream[i+1:])
            i += offset + 1
            sequence.add(sequ)
        elif token.kind == lexer.STATEMENT:
            # Stray semicolon: empty statement, nothing to add.
            pass
        elif token.kind == lexer.RBLOCK:
            if flags.debug:
                print("Stopping generating sequence")
            return sequence, i
        else:
            raise InvalidStatement(stream[i])
        i += 1
    return sequence, i
def optimize_ast(root):
    """Recursively replace ``:`` Operation nodes with Cast nodes, in place.

    The last operand of a ``:`` operation names the target datatype; the
    remaining operands become the children of the new Cast node.
    """
    for index, child in enumerate(root.children):
        if type(child) is ast.Operation and child.symbol == ":":
            operands = child.children
            datatype = operands.pop().identity
            if flags.debug:
                print("Replacing cast of %s" % datatype)
            cast_node = ast.Cast(datatype)
            cast_node.children = operands
            root.children[index] = cast_node
        else:
            optimize_ast(child)
def generate(tokens):
    """Parse the tokens to AST notation."""
    # clean off whitespaces
    clean = [t for t in tokens if t.kind != lexer.WHITESPACE]
    if flags.debug:
        print("Optimized tokens:", '; '.join(str(e) for e in clean))
    # Build the statement tree, then rewrite ':' operations into Cast nodes.
    sequ, _ = generate_sequence(clean)
    if flags.debug:
        print("Optimizing AST ...")
    optimize_ast(sequ)
    if flags.debug:
        print("Final AST:", str(sequ))
    return sequ
def demo_syntax_tree():
    """Build and return a demonstration syntax tree."""
    return ast.syntax_tree()
| mit | -2,216,747,502,606,387,700 | 32.334783 | 140 | 0.573627 | false | 3.907747 | false | false | false |
eugeniominissale/ecommerce_api | models.py | 1 | 10882 | """
Models contains the database models for the application.
"""
import datetime
from uuid import uuid4
from passlib.hash import pbkdf2_sha256
from peewee import DateTimeField, TextField, CharField, BooleanField
from peewee import SqliteDatabase, DecimalField
from peewee import UUIDField, ForeignKeyField, IntegerField
from playhouse.signals import Model, post_delete, pre_delete
from exceptions import InsufficientAvailabilityException, WrongQuantity
from schemas import (ItemSchema, UserSchema, OrderSchema, OrderItemSchema,
BaseSchema, AddressSchema)
from utils import remove_image
database = SqliteDatabase('database.db')
class BaseModel(Model):
    """Base model for all the database models.

    Provides created/updated timestamps, JSON-API serialization helpers and
    input validation delegated to the class-level ``_schema``.
    """
    created_at = DateTimeField(default=datetime.datetime.now)
    updated_at = DateTimeField(default=datetime.datetime.now)
    _schema = BaseSchema

    def save(self, *args, **kwargs):
        """Automatically refresh ``updated_at`` on every save."""
        self.updated_at = datetime.datetime.now()
        return super(BaseModel, self).save(*args, **kwargs)

    class Meta:
        database = database

    @classmethod
    def get_all(cls):
        """Return every row of this model as a list."""
        return list(cls.select())

    @classmethod
    def json_list(cls, objs_list):
        """Serialize a list of instances into a JSON-API document."""
        return cls._schema.jsonapi_list(objs_list)

    def json(self, include_data=None):
        """Serialize this instance into a JSON-API document.

        ``include_data`` lists related resources to embed.  BUG FIX: the
        default was a shared mutable list (``[]``); it is now ``None`` and
        normalized per call, avoiding the mutable-default-argument pitfall.
        """
        parsed, _errors = self._schema.jsonapi(self, include_data or [])
        return parsed

    @classmethod
    def validate_input(cls, data, partial=False):
        """Validate the request payload *data* against the model schema."""
        return cls._schema.validate_input(data, partial=partial)
class Item(BaseModel):
    """
    Product model
        uuid: public identifier exposed to API clients
        name: product unique name
        price: product price (auto-rounded decimal)
        description: product description text
        availability: number of available products of this kind
    """
    uuid = UUIDField(unique=True)
    name = CharField()
    price = DecimalField(auto_round=True)
    description = TextField()
    availability = IntegerField()
    _schema = ItemSchema
    def __str__(self):
        # Human-readable summary; availability is intentionally omitted.
        return '{}, {}, {}, {}'.format(
            self.uuid,
            self.name,
            self.price,
            self.description)
@database.atomic()
@pre_delete(sender=Item)
def on_delete_item_handler(model_class, instance):
    """Delete item pictures in cascade"""
    # Runs (atomically) before an Item row is deleted: removes every Picture
    # referencing the item, which in turn triggers the Picture post_delete
    # handler that removes the image files from disk.
    pictures = Picture.select().join(Item).where(
        Item.uuid == instance.uuid)
    for pic in pictures:
        pic.delete_instance()
class Picture(BaseModel):
    """
    Picture model
    uuid: picture identifier and file name stored
    extension: picture type (file extension without the dot)
    item: referenced item
    """
    uuid = UUIDField(unique=True)
    extension = CharField()
    item = ForeignKeyField(Item, related_name='pictures')
    def filename(self):
        # Name of the stored file on disk: "<uuid>.<extension>".
        return '{}.{}'.format(
            self.uuid,
            self.extension)
    def json(self):
        # Overrides BaseModel.json with a plain dict (not a JSON-API doc).
        return {
            'uuid': str(self.uuid),
            'extension': self.extension,
            'item_uuid': str(self.item.uuid)
        }
    def __str__(self):
        return '{}.{} -> item: {}'.format(
            self.uuid,
            self.extension,
            self.item.uuid)
@post_delete(sender=Picture)
def on_delete_picture_handler(model_class, instance):
    """Delete file picture"""
    # After a Picture row is removed, also delete its image file on disk.
    # TODO log eventual inconsistency
    remove_image(instance.uuid, instance.extension)
class User(BaseModel):
    """
    User represents a user of the application.
    Users are always created with role "normal" (admin field = False).
    """
    uuid = UUIDField(unique=True)
    first_name = CharField()
    last_name = CharField()
    email = CharField(unique=True)
    password = CharField()  # stores the pbkdf2_sha256 hash, never plaintext
    admin = BooleanField(default=False)
    _schema = UserSchema
    @staticmethod
    def exists(email):
        """
        Check whether a user exists, looked up by the (unique) email field.
        """
        try:
            User.get(User.email == email)
        except User.DoesNotExist:
            return False
        return True
    @staticmethod
    def hash_password(password):
        """Use passlib to get a crypted password.
        :returns: str
        """
        return pbkdf2_sha256.hash(password)
    def verify_password(self, password):
        """
        Verify a clear password against the stored hashed password of the user
        using passlib.
        :returns: bool
        """
        return pbkdf2_sha256.verify(password, self.password)
class Address(BaseModel):
    """ The model Address represents a user address.
    Each address is related to one user, but one user can have
    more addresses."""
    uuid = UUIDField(unique=True)
    user = ForeignKeyField(User, related_name='addresses')
    country = CharField()
    city = CharField()
    post_code = CharField()
    address = CharField()
    phone = CharField()
    _schema = AddressSchema
class Order(BaseModel):
    """ The model Order contains a list of orders - one row per order.
    Each order is placed by one client.
    An order is represented by an uuid,
    a dateTimeField which is the date of the order, a DecimalField which
    is the total price of the order. Finally, there is the delivery address,
    if it's different from the customer's address in their record.
    """
    uuid = UUIDField(unique=True, default=uuid4)
    total_price = DecimalField(default=0)
    delivery_address = ForeignKeyField(Address, related_name="orders")
    user = ForeignKeyField(User, related_name="orders")
    _schema = OrderSchema
    class Meta:
        order_by = ('created_at',)
    @property
    def order_items(self):
        """
        Returns the list of OrderItem related to the order.
        """
        query = (
            OrderItem
            .select(OrderItem, Order)
            .join(Order)
            .where(Order.uuid == self.uuid)
        )
        return [orderitem for orderitem in query]
    def empty_order(self):
        """
        Remove all the items from the order.
        Delete all OrderItem related to this order and reset the total_price
        value to 0.
        """
        # NOTE(review): item availability is NOT restored here, unlike
        # remove_item -- confirm whether that asymmetry is intended.
        self.total_price = 0
        OrderItem.delete().where(OrderItem.order == self).execute()
        self.save()
        return self
    def add_item(self, item, quantity=1):
        """
        Add one item to the order.
        Creates one OrderItem row if the item is not present in the order yet,
        or increasing the count of the existing OrderItem. It also updates the
        item availability counter and raise InsufficientAvailability if
        quantity is less than item availability.
        :param item Item: instance of models.Item
        """
        for orderitem in self.order_items:
            # Looping all the OrderItem related to this order, if one with the
            # same item is found we update that row.
            if orderitem.item == item:
                orderitem.add_item(quantity)
                self.total_price += (item.price * quantity)
                self.save()
                return self
        # if no existing OrderItem is found with this order and this Item,
        # create a new row in the OrderItem table and use OrderItem.add_item
        # to properly use the calculus logic that handles updating prices and
        # availability. To use correctly add_item the initial quantity and
        # subtotal are set to 0
        OrderItem.create(
            order=self,
            item=item,
            quantity=0,
            subtotal=0,
        ).add_item(quantity)
        self.total_price += (item.price * quantity)
        self.save()
        return self
    def update_item(self, item, quantity):
        """
        Update the quantity of the orderitem of the given item.
        """
        # Delegates to add_item/remove_item so price and availability
        # bookkeeping stays in one place; a missing item is simply added.
        for order_item in self.order_items:
            if order_item.item == item:
                diff = quantity - order_item.quantity
                if diff > 0:
                    self.add_item(item, abs(diff))
                elif diff < 0:
                    self.remove_item(item, abs(diff))
                break
        else:
            self.add_item(item, quantity)
    def remove_item(self, item, quantity=1):
        """
        Remove the given item from the order, reducing quantity of the relative
        OrderItem entity or deleting it if removing the last item
        (OrderItem.quantity == 0).
        It also restores the item availability.
        """
        for orderitem in self.order_items:
            if orderitem.item == item:
                # OrderItem.remove_item raises WrongQuantity when asked to
                # remove more than present, so quantity == removed_items here.
                removed_items = orderitem.remove_item(quantity)
                item.availability += quantity
                item.save()
                self.total_price -= (item.price * removed_items)
                self.save()
                return self
        # No OrderItem found for this item
        # TODO: Raise or return something more explicit
        return self
class OrderItem(BaseModel):
    """ The model OrderItem is a cross table that contains the order
    items - one row for each item on an order (so each order can
    generate multiple rows).
    It contains two reference field. The first one is a reference
    of the model Order and the other one is for the Item.
    It contains also the quantity of the item and the total price
    of that item.
    """
    order = ForeignKeyField(Order)
    item = ForeignKeyField(Item)
    quantity = IntegerField()
    subtotal = DecimalField()
    _schema = OrderItemSchema
    def add_item(self, quantity=1):
        """
        Add one item to the OrderItem, increasing the quantity count and
        recalculating the subtotal value for this item(s)
        """
        # Availability is decremented here, symmetrically with remove_item.
        if quantity > self.item.availability:
            raise InsufficientAvailabilityException(self.item, quantity)
        self.item.availability -= quantity
        self.item.save()
        self.quantity += quantity
        self._calculate_subtotal()
        self.save()
    def remove_item(self, quantity=1):
        """
        Remove one item from the OrderItem, decreasing the quantity count and
        recalculating the subtotal value for this item(s)
        :returns: int - quantity of items really removed.
        """
        if self.quantity < quantity:
            raise WrongQuantity('Quantity of items to be removed ({}) higher than availability ({})'
                                .format(quantity, self.quantity))
        elif self.quantity > quantity:
            self.quantity -= quantity
            self._calculate_subtotal()
            self.save()
        else:  # elif self.quantity == quantity
            # Removing every unit: delete the whole row instead of keeping a
            # zero-quantity entry.
            quantity = self.quantity
            self.delete_instance()
        return quantity
    def _calculate_subtotal(self):
        """Calculate the subtotal value of the item(s) in the order."""
        self.subtotal = self.item.price * self.quantity
| gpl-3.0 | -8,139,326,945,449,712,000 | 29.914773 | 100 | 0.61496 | false | 4.458009 | false | false | false |
pgmillon/ansible | lib/ansible/modules/network/fortimanager/fmgr_secprof_web.py | 38 | 43801 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Standard Ansible module metadata: community-supported, preview status.
ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fmgr_secprof_web
version_added: "2.8"
notes:
- Full Documentation at U(https://ftnt-ansible-docs.readthedocs.io/en/latest/).
author:
- Luke Weighall (@lweighall)
- Andrew Welsh (@Ghilli3)
- Jim Huber (@p4r4n0y1ng)
short_description: Manage web filter security profiles in FortiManager
description:
- Manage web filter security profiles in FortiManager through playbooks using the FMG API
options:
adom:
description:
- The ADOM the configuration should belong to.
required: false
default: root
mode:
description:
- Sets one of three modes for managing the object.
- Allows use of soft-adds instead of overwriting existing values
choices: ['add', 'set', 'delete', 'update']
required: false
default: add
youtube_channel_status:
description:
- YouTube channel filter status.
- choice | disable | Disable YouTube channel filter.
- choice | blacklist | Block matches.
- choice | whitelist | Allow matches.
required: false
choices: ["disable", "blacklist", "whitelist"]
wisp_servers:
description:
- WISP servers.
required: false
wisp_algorithm:
description:
- WISP server selection algorithm.
- choice | auto-learning | Select the lightest loading healthy server.
- choice | primary-secondary | Select the first healthy server in order.
- choice | round-robin | Select the next healthy server.
required: false
choices: ["auto-learning", "primary-secondary", "round-robin"]
wisp:
description:
- Enable/disable web proxy WISP.
- choice | disable | Disable web proxy WISP.
- choice | enable | Enable web proxy WISP.
required: false
choices: ["disable", "enable"]
web_url_log:
description:
- Enable/disable logging URL filtering.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
web_invalid_domain_log:
description:
- Enable/disable logging invalid domain names.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
web_ftgd_quota_usage:
description:
- Enable/disable logging daily quota usage.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
web_ftgd_err_log:
description:
- Enable/disable logging rating errors.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
web_filter_vbs_log:
description:
- Enable/disable logging VBS scripts.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
web_filter_unknown_log:
description:
- Enable/disable logging unknown scripts.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
web_filter_referer_log:
description:
- Enable/disable logging referrers.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
web_filter_jscript_log:
description:
- Enable/disable logging JScripts.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
web_filter_js_log:
description:
- Enable/disable logging Java scripts.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
web_filter_cookie_removal_log:
description:
- Enable/disable logging blocked cookies.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
web_filter_cookie_log:
description:
- Enable/disable logging cookie filtering.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
web_filter_command_block_log:
description:
- Enable/disable logging blocked commands.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
web_filter_applet_log:
description:
- Enable/disable logging Java applets.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
web_filter_activex_log:
description:
- Enable/disable logging ActiveX.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
web_extended_all_action_log:
description:
- Enable/disable extended any filter action logging for web filtering.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
web_content_log:
description:
            - Enable/disable logging blocked web content.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
replacemsg_group:
description:
- Replacement message group.
required: false
post_action:
description:
- Action taken for HTTP POST traffic.
- choice | normal | Normal, POST requests are allowed.
- choice | block | POST requests are blocked.
required: false
choices: ["normal", "block"]
ovrd_perm:
description:
- FLAG Based Options. Specify multiple in list form.
- flag | bannedword-override | Banned word override.
- flag | urlfilter-override | URL filter override.
- flag | fortiguard-wf-override | FortiGuard Web Filter override.
- flag | contenttype-check-override | Content-type header override.
required: false
choices:
- bannedword-override
- urlfilter-override
- fortiguard-wf-override
- contenttype-check-override
options:
description:
- FLAG Based Options. Specify multiple in list form.
- flag | block-invalid-url | Block sessions contained an invalid domain name.
- flag | jscript | Javascript block.
- flag | js | JS block.
- flag | vbs | VB script block.
- flag | unknown | Unknown script block.
- flag | wf-referer | Referring block.
- flag | intrinsic | Intrinsic script block.
- flag | wf-cookie | Cookie block.
- flag | per-user-bwl | Per-user black/white list filter
- flag | activexfilter | ActiveX filter.
- flag | cookiefilter | Cookie filter.
- flag | javafilter | Java applet filter.
required: false
choices:
- block-invalid-url
- jscript
- js
- vbs
- unknown
- wf-referer
- intrinsic
- wf-cookie
- per-user-bwl
- activexfilter
- cookiefilter
- javafilter
name:
description:
- Profile name.
required: false
log_all_url:
description:
- Enable/disable logging all URLs visited.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
inspection_mode:
description:
- Web filtering inspection mode.
- choice | proxy | Proxy.
- choice | flow-based | Flow based.
required: false
choices: ["proxy", "flow-based"]
https_replacemsg:
description:
- Enable replacement messages for HTTPS.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
extended_log:
description:
- Enable/disable extended logging for web filtering.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
comment:
description:
- Optional comments.
required: false
ftgd_wf:
description:
- EXPERTS ONLY! KNOWLEDGE OF FMGR JSON API IS REQUIRED!
- List of multiple child objects to be added. Expects a list of dictionaries.
- Dictionaries must use FortiManager API parameters, not the ansible ones listed below.
- If submitted, all other prefixed sub-parameters ARE IGNORED.
- This object is MUTUALLY EXCLUSIVE with its options.
- We expect that you know what you are doing with these list parameters, and are leveraging the JSON API Guide.
- WHEN IN DOUBT, USE THE SUB OPTIONS BELOW INSTEAD TO CREATE OBJECTS WITH MULTIPLE TASKS
required: false
ftgd_wf_exempt_quota:
description:
- Do not stop quota for these categories.
required: false
ftgd_wf_max_quota_timeout:
description:
- Maximum FortiGuard quota used by single page view in seconds (excludes streams).
required: false
ftgd_wf_options:
description:
- Options for FortiGuard Web Filter.
- FLAG Based Options. Specify multiple in list form.
- flag | error-allow | Allow web pages with a rating error to pass through.
- flag | rate-server-ip | Rate the server IP in addition to the domain name.
- flag | connect-request-bypass | Bypass connection which has CONNECT request.
- flag | ftgd-disable | Disable FortiGuard scanning.
required: false
choices: ["error-allow", "rate-server-ip", "connect-request-bypass", "ftgd-disable"]
ftgd_wf_ovrd:
description:
- Allow web filter profile overrides.
required: false
ftgd_wf_rate_crl_urls:
description:
- Enable/disable rating CRL by URL.
- choice | disable | Disable rating CRL by URL.
- choice | enable | Enable rating CRL by URL.
required: false
choices: ["disable", "enable"]
ftgd_wf_rate_css_urls:
description:
- Enable/disable rating CSS by URL.
- choice | disable | Disable rating CSS by URL.
- choice | enable | Enable rating CSS by URL.
required: false
choices: ["disable", "enable"]
ftgd_wf_rate_image_urls:
description:
- Enable/disable rating images by URL.
- choice | disable | Disable rating images by URL (blocked images are replaced with blanks).
- choice | enable | Enable rating images by URL (blocked images are replaced with blanks).
required: false
choices: ["disable", "enable"]
ftgd_wf_rate_javascript_urls:
description:
- Enable/disable rating JavaScript by URL.
- choice | disable | Disable rating JavaScript by URL.
- choice | enable | Enable rating JavaScript by URL.
required: false
choices: ["disable", "enable"]
ftgd_wf_filters_action:
description:
- Action to take for matches.
- choice | block | Block access.
- choice | monitor | Allow access while logging the action.
- choice | warning | Allow access after warning the user.
- choice | authenticate | Authenticate user before allowing access.
required: false
choices: ["block", "monitor", "warning", "authenticate"]
ftgd_wf_filters_auth_usr_grp:
description:
- Groups with permission to authenticate.
required: false
ftgd_wf_filters_category:
description:
- Categories and groups the filter examines.
required: false
ftgd_wf_filters_log:
description:
- Enable/disable logging.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
ftgd_wf_filters_override_replacemsg:
description:
- Override replacement message.
required: false
ftgd_wf_filters_warn_duration:
description:
- Duration of warnings.
required: false
ftgd_wf_filters_warning_duration_type:
description:
- Re-display warning after closing browser or after a timeout.
- choice | session | After session ends.
- choice | timeout | After timeout occurs.
required: false
choices: ["session", "timeout"]
ftgd_wf_filters_warning_prompt:
description:
- Warning prompts in each category or each domain.
- choice | per-domain | Per-domain warnings.
- choice | per-category | Per-category warnings.
required: false
choices: ["per-domain", "per-category"]
ftgd_wf_quota_category:
description:
- FortiGuard categories to apply quota to (category action must be set to monitor).
required: false
ftgd_wf_quota_duration:
description:
- Duration of quota.
required: false
ftgd_wf_quota_override_replacemsg:
description:
- Override replacement message.
required: false
ftgd_wf_quota_type:
description:
- Quota type.
- choice | time | Use a time-based quota.
- choice | traffic | Use a traffic-based quota.
required: false
choices: ["time", "traffic"]
ftgd_wf_quota_unit:
description:
- Traffic quota unit of measurement.
- choice | B | Quota in bytes.
- choice | KB | Quota in kilobytes.
- choice | MB | Quota in megabytes.
- choice | GB | Quota in gigabytes.
required: false
choices: ["B", "KB", "MB", "GB"]
ftgd_wf_quota_value:
description:
- Traffic quota value.
required: false
override:
description:
- EXPERTS ONLY! KNOWLEDGE OF FMGR JSON API IS REQUIRED!
- List of multiple child objects to be added. Expects a list of dictionaries.
- Dictionaries must use FortiManager API parameters, not the ansible ones listed below.
- If submitted, all other prefixed sub-parameters ARE IGNORED.
- This object is MUTUALLY EXCLUSIVE with its options.
- We expect that you know what you are doing with these list parameters, and are leveraging the JSON API Guide.
- WHEN IN DOUBT, USE THE SUB OPTIONS BELOW INSTEAD TO CREATE OBJECTS WITH MULTIPLE TASKS
required: false
override_ovrd_cookie:
description:
- Allow/deny browser-based (cookie) overrides.
- choice | deny | Deny browser-based (cookie) override.
- choice | allow | Allow browser-based (cookie) override.
required: false
choices: ["deny", "allow"]
override_ovrd_dur:
description:
- Override duration.
required: false
override_ovrd_dur_mode:
description:
- Override duration mode.
- choice | constant | Constant mode.
- choice | ask | Prompt for duration when initiating an override.
required: false
choices: ["constant", "ask"]
override_ovrd_scope:
description:
- Override scope.
- choice | user | Override for the user.
- choice | user-group | Override for the user's group.
- choice | ip | Override for the initiating IP.
- choice | ask | Prompt for scope when initiating an override.
- choice | browser | Create browser-based (cookie) override.
required: false
choices: ["user", "user-group", "ip", "ask", "browser"]
override_ovrd_user_group:
description:
- User groups with permission to use the override.
required: false
override_profile:
description:
- Web filter profile with permission to create overrides.
required: false
override_profile_attribute:
description:
- Profile attribute to retrieve from the RADIUS server.
- choice | User-Name | Use this attribute.
- choice | NAS-IP-Address | Use this attribute.
- choice | Framed-IP-Address | Use this attribute.
- choice | Framed-IP-Netmask | Use this attribute.
- choice | Filter-Id | Use this attribute.
- choice | Login-IP-Host | Use this attribute.
- choice | Reply-Message | Use this attribute.
- choice | Callback-Number | Use this attribute.
- choice | Callback-Id | Use this attribute.
- choice | Framed-Route | Use this attribute.
- choice | Framed-IPX-Network | Use this attribute.
- choice | Class | Use this attribute.
- choice | Called-Station-Id | Use this attribute.
- choice | Calling-Station-Id | Use this attribute.
- choice | NAS-Identifier | Use this attribute.
- choice | Proxy-State | Use this attribute.
- choice | Login-LAT-Service | Use this attribute.
- choice | Login-LAT-Node | Use this attribute.
- choice | Login-LAT-Group | Use this attribute.
- choice | Framed-AppleTalk-Zone | Use this attribute.
- choice | Acct-Session-Id | Use this attribute.
- choice | Acct-Multi-Session-Id | Use this attribute.
required: false
choices:
- User-Name
- NAS-IP-Address
- Framed-IP-Address
- Framed-IP-Netmask
- Filter-Id
- Login-IP-Host
- Reply-Message
- Callback-Number
- Callback-Id
- Framed-Route
- Framed-IPX-Network
- Class
- Called-Station-Id
- Calling-Station-Id
- NAS-Identifier
- Proxy-State
- Login-LAT-Service
- Login-LAT-Node
- Login-LAT-Group
- Framed-AppleTalk-Zone
- Acct-Session-Id
- Acct-Multi-Session-Id
override_profile_type:
description:
- Override profile type.
- choice | list | Profile chosen from list.
- choice | radius | Profile determined by RADIUS server.
required: false
choices: ["list", "radius"]
url_extraction:
description:
- EXPERTS ONLY! KNOWLEDGE OF FMGR JSON API IS REQUIRED!
- List of multiple child objects to be added. Expects a list of dictionaries.
- Dictionaries must use FortiManager API parameters, not the ansible ones listed below.
- If submitted, all other prefixed sub-parameters ARE IGNORED.
- This object is MUTUALLY EXCLUSIVE with its options.
- We expect that you know what you are doing with these list parameters, and are leveraging the JSON API Guide.
- WHEN IN DOUBT, USE THE SUB OPTIONS BELOW INSTEAD TO CREATE OBJECTS WITH MULTIPLE TASKS
required: false
url_extraction_redirect_header:
description:
- HTTP header name to use for client redirect on blocked requests
required: false
url_extraction_redirect_no_content:
description:
- Enable / Disable empty message-body entity in HTTP response
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
url_extraction_redirect_url:
description:
- HTTP header value to use for client redirect on blocked requests
required: false
url_extraction_server_fqdn:
description:
- URL extraction server FQDN (fully qualified domain name)
required: false
url_extraction_status:
description:
- Enable URL Extraction
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
web:
description:
- EXPERTS ONLY! KNOWLEDGE OF FMGR JSON API IS REQUIRED!
- List of multiple child objects to be added. Expects a list of dictionaries.
- Dictionaries must use FortiManager API parameters, not the ansible ones listed below.
- If submitted, all other prefixed sub-parameters ARE IGNORED.
- This object is MUTUALLY EXCLUSIVE with its options.
- We expect that you know what you are doing with these list parameters, and are leveraging the JSON API Guide.
- WHEN IN DOUBT, USE THE SUB OPTIONS BELOW INSTEAD TO CREATE OBJECTS WITH MULTIPLE TASKS
required: false
web_blacklist:
description:
- Enable/disable automatic addition of URLs detected by FortiSandbox to blacklist.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
web_bword_table:
description:
- Banned word table ID.
required: false
web_bword_threshold:
description:
- Banned word score threshold.
required: false
web_content_header_list:
description:
- Content header list.
required: false
web_keyword_match:
description:
- Search keywords to log when match is found.
required: false
web_log_search:
description:
- Enable/disable logging all search phrases.
- choice | disable | Disable setting.
- choice | enable | Enable setting.
required: false
choices: ["disable", "enable"]
web_safe_search:
description:
- Safe search type.
- FLAG Based Options. Specify multiple in list form.
- flag | url | Insert safe search string into URL.
- flag | header | Insert safe search header.
required: false
choices: ["url", "header"]
web_urlfilter_table:
description:
- URL filter table ID.
required: false
web_whitelist:
description:
- FortiGuard whitelist settings.
- FLAG Based Options. Specify multiple in list form.
- flag | exempt-av | Exempt antivirus.
- flag | exempt-webcontent | Exempt web content.
- flag | exempt-activex-java-cookie | Exempt ActiveX-JAVA-Cookie.
- flag | exempt-dlp | Exempt DLP.
- flag | exempt-rangeblock | Exempt RangeBlock.
- flag | extended-log-others | Support extended log.
required: false
choices:
- exempt-av
- exempt-webcontent
- exempt-activex-java-cookie
- exempt-dlp
- exempt-rangeblock
- extended-log-others
web_youtube_restrict:
description:
- YouTube EDU filter level.
- choice | strict | Strict access for YouTube.
- choice | none | Full access for YouTube.
- choice | moderate | Moderate access for YouTube.
required: false
choices: ["strict", "none", "moderate"]
youtube_channel_filter:
description:
- EXPERTS ONLY! KNOWLEDGE OF FMGR JSON API IS REQUIRED!
- List of multiple child objects to be added. Expects a list of dictionaries.
- Dictionaries must use FortiManager API parameters, not the ansible ones listed below.
- If submitted, all other prefixed sub-parameters ARE IGNORED.
- This object is MUTUALLY EXCLUSIVE with its options.
- We expect that you know what you are doing with these list parameters, and are leveraging the JSON API Guide.
- WHEN IN DOUBT, USE THE SUB OPTIONS BELOW INSTEAD TO CREATE OBJECTS WITH MULTIPLE TASKS
required: false
youtube_channel_filter_channel_id:
description:
- YouTube channel ID to be filtered.
required: false
youtube_channel_filter_comment:
description:
- Comment.
required: false
'''
EXAMPLES = '''
- name: DELETE Profile
fmgr_secprof_web:
name: "Ansible_Web_Filter_Profile"
mode: "delete"
- name: CREATE Profile
fmgr_secprof_web:
name: "Ansible_Web_Filter_Profile"
comment: "Created by Ansible Module TEST"
mode: "set"
extended_log: "enable"
inspection_mode: "proxy"
log_all_url: "enable"
options: "js"
ovrd_perm: "bannedword-override"
post_action: "block"
web_content_log: "enable"
web_extended_all_action_log: "enable"
web_filter_activex_log: "enable"
web_filter_applet_log: "enable"
web_filter_command_block_log: "enable"
web_filter_cookie_log: "enable"
web_filter_cookie_removal_log: "enable"
web_filter_js_log: "enable"
web_filter_jscript_log: "enable"
web_filter_referer_log: "enable"
web_filter_unknown_log: "enable"
web_filter_vbs_log: "enable"
web_ftgd_err_log: "enable"
web_ftgd_quota_usage: "enable"
web_invalid_domain_log: "enable"
web_url_log: "enable"
wisp: "enable"
wisp_algorithm: "auto-learning"
youtube_channel_status: "blacklist"
'''
RETURN = """
api_result:
description: full API response, includes status code and message
returned: always
type: str
"""
from ansible.module_utils.basic import AnsibleModule, env_fallback
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortimanager.fortimanager import FortiManagerHandler
from ansible.module_utils.network.fortimanager.common import FMGBaseException
from ansible.module_utils.network.fortimanager.common import FMGRCommon
from ansible.module_utils.network.fortimanager.common import FMGRMethods
from ansible.module_utils.network.fortimanager.common import DEFAULT_RESULT_OBJ
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
from ansible.module_utils.network.fortimanager.common import prepare_dict
from ansible.module_utils.network.fortimanager.common import scrub_dict
def fmgr_webfilter_profile_modify(fmgr, paramgram):
    """
    Apply an add/set/update/delete operation for a web filter profile.

    :param fmgr: FortiManagerHandler used to issue the FMG JSON API call.
    :param paramgram: dict of module parameters; "mode" selects the
        operation and "adom" scopes the target ADOM.
    :return: raw API response from the FortiManager.
    """
    mode = paramgram["mode"]
    adom = paramgram["adom"]
    response = DEFAULT_RESULT_OBJ
    url = ""
    datagram = {}

    if mode == "delete":
        # Deletion addresses a single profile by name and sends no payload.
        url = '/pm/config/adom/{adom}/obj/webfilter/profile/{name}'.format(adom=adom, name=paramgram["name"])
        datagram = {}
    elif mode in ('set', 'add', 'update'):
        # Create/update operations post the scrubbed parameter set to the
        # profile collection endpoint.
        url = '/pm/config/adom/{adom}/obj/webfilter/profile'.format(adom=adom)
        datagram = scrub_dict(prepare_dict(paramgram))

    response = fmgr.process_request(url, datagram, mode)
    return response
#############
# END METHODS
#############
def main():
argument_spec = dict(
adom=dict(type="str", default="root"),
mode=dict(choices=["add", "set", "delete", "update"], type="str", default="add"),
youtube_channel_status=dict(required=False, type="str", choices=["disable", "blacklist", "whitelist"]),
wisp_servers=dict(required=False, type="str"),
wisp_algorithm=dict(required=False, type="str", choices=["auto-learning", "primary-secondary", "round-robin"]),
wisp=dict(required=False, type="str", choices=["disable", "enable"]),
web_url_log=dict(required=False, type="str", choices=["disable", "enable"]),
web_invalid_domain_log=dict(required=False, type="str", choices=["disable", "enable"]),
web_ftgd_quota_usage=dict(required=False, type="str", choices=["disable", "enable"]),
web_ftgd_err_log=dict(required=False, type="str", choices=["disable", "enable"]),
web_filter_vbs_log=dict(required=False, type="str", choices=["disable", "enable"]),
web_filter_unknown_log=dict(required=False, type="str", choices=["disable", "enable"]),
web_filter_referer_log=dict(required=False, type="str", choices=["disable", "enable"]),
web_filter_jscript_log=dict(required=False, type="str", choices=["disable", "enable"]),
web_filter_js_log=dict(required=False, type="str", choices=["disable", "enable"]),
web_filter_cookie_removal_log=dict(required=False, type="str", choices=["disable", "enable"]),
web_filter_cookie_log=dict(required=False, type="str", choices=["disable", "enable"]),
web_filter_command_block_log=dict(required=False, type="str", choices=["disable", "enable"]),
web_filter_applet_log=dict(required=False, type="str", choices=["disable", "enable"]),
web_filter_activex_log=dict(required=False, type="str", choices=["disable", "enable"]),
web_extended_all_action_log=dict(required=False, type="str", choices=["disable", "enable"]),
web_content_log=dict(required=False, type="str", choices=["disable", "enable"]),
replacemsg_group=dict(required=False, type="str"),
post_action=dict(required=False, type="str", choices=["normal", "block"]),
ovrd_perm=dict(required=False, type="list", choices=["bannedword-override",
"urlfilter-override",
"fortiguard-wf-override",
"contenttype-check-override"]),
options=dict(required=False, type="list", choices=["block-invalid-url",
"jscript",
"js",
"vbs",
"unknown",
"wf-referer",
"intrinsic",
"wf-cookie",
"per-user-bwl",
"activexfilter",
"cookiefilter",
"javafilter"]),
name=dict(required=False, type="str"),
log_all_url=dict(required=False, type="str", choices=["disable", "enable"]),
inspection_mode=dict(required=False, type="str", choices=["proxy", "flow-based"]),
https_replacemsg=dict(required=False, type="str", choices=["disable", "enable"]),
extended_log=dict(required=False, type="str", choices=["disable", "enable"]),
comment=dict(required=False, type="str"),
ftgd_wf=dict(required=False, type="list"),
ftgd_wf_exempt_quota=dict(required=False, type="str"),
ftgd_wf_max_quota_timeout=dict(required=False, type="int"),
ftgd_wf_options=dict(required=False, type="str", choices=["error-allow", "rate-server-ip",
"connect-request-bypass", "ftgd-disable"]),
ftgd_wf_ovrd=dict(required=False, type="str"),
ftgd_wf_rate_crl_urls=dict(required=False, type="str", choices=["disable", "enable"]),
ftgd_wf_rate_css_urls=dict(required=False, type="str", choices=["disable", "enable"]),
ftgd_wf_rate_image_urls=dict(required=False, type="str", choices=["disable", "enable"]),
ftgd_wf_rate_javascript_urls=dict(required=False, type="str", choices=["disable", "enable"]),
ftgd_wf_filters_action=dict(required=False, type="str", choices=["block", "monitor",
"warning", "authenticate"]),
ftgd_wf_filters_auth_usr_grp=dict(required=False, type="str"),
ftgd_wf_filters_category=dict(required=False, type="str"),
ftgd_wf_filters_log=dict(required=False, type="str", choices=["disable", "enable"]),
ftgd_wf_filters_override_replacemsg=dict(required=False, type="str"),
ftgd_wf_filters_warn_duration=dict(required=False, type="str"),
ftgd_wf_filters_warning_duration_type=dict(required=False, type="str", choices=["session", "timeout"]),
ftgd_wf_filters_warning_prompt=dict(required=False, type="str", choices=["per-domain", "per-category"]),
ftgd_wf_quota_category=dict(required=False, type="str"),
ftgd_wf_quota_duration=dict(required=False, type="str"),
ftgd_wf_quota_override_replacemsg=dict(required=False, type="str"),
ftgd_wf_quota_type=dict(required=False, type="str", choices=["time", "traffic"]),
ftgd_wf_quota_unit=dict(required=False, type="str", choices=["B", "KB", "MB", "GB"]),
ftgd_wf_quota_value=dict(required=False, type="int"),
override=dict(required=False, type="list"),
override_ovrd_cookie=dict(required=False, type="str", choices=["deny", "allow"]),
override_ovrd_dur=dict(required=False, type="str"),
override_ovrd_dur_mode=dict(required=False, type="str", choices=["constant", "ask"]),
override_ovrd_scope=dict(required=False, type="str", choices=["user", "user-group", "ip", "ask", "browser"]),
override_ovrd_user_group=dict(required=False, type="str"),
override_profile=dict(required=False, type="str"),
override_profile_attribute=dict(required=False, type="list", choices=["User-Name",
"NAS-IP-Address",
"Framed-IP-Address",
"Framed-IP-Netmask",
"Filter-Id",
"Login-IP-Host",
"Reply-Message",
"Callback-Number",
"Callback-Id",
"Framed-Route",
"Framed-IPX-Network",
"Class",
"Called-Station-Id",
"Calling-Station-Id",
"NAS-Identifier",
"Proxy-State",
"Login-LAT-Service",
"Login-LAT-Node",
"Login-LAT-Group",
"Framed-AppleTalk-Zone",
"Acct-Session-Id",
"Acct-Multi-Session-Id"]),
override_profile_type=dict(required=False, type="str", choices=["list", "radius"]),
url_extraction=dict(required=False, type="list"),
url_extraction_redirect_header=dict(required=False, type="str"),
url_extraction_redirect_no_content=dict(required=False, type="str", choices=["disable", "enable"]),
url_extraction_redirect_url=dict(required=False, type="str"),
url_extraction_server_fqdn=dict(required=False, type="str"),
url_extraction_status=dict(required=False, type="str", choices=["disable", "enable"]),
web=dict(required=False, type="list"),
web_blacklist=dict(required=False, type="str", choices=["disable", "enable"]),
web_bword_table=dict(required=False, type="str"),
web_bword_threshold=dict(required=False, type="int"),
web_content_header_list=dict(required=False, type="str"),
web_keyword_match=dict(required=False, type="str"),
web_log_search=dict(required=False, type="str", choices=["disable", "enable"]),
web_safe_search=dict(required=False, type="str", choices=["url", "header"]),
web_urlfilter_table=dict(required=False, type="str"),
web_whitelist=dict(required=False, type="list", choices=["exempt-av",
"exempt-webcontent",
"exempt-activex-java-cookie",
"exempt-dlp",
"exempt-rangeblock",
"extended-log-others"]),
web_youtube_restrict=dict(required=False, type="str", choices=["strict", "none", "moderate"]),
youtube_channel_filter=dict(required=False, type="list"),
youtube_channel_filter_channel_id=dict(required=False, type="str"),
youtube_channel_filter_comment=dict(required=False, type="str"),
)
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False, )
# MODULE PARAMGRAM
paramgram = {
"mode": module.params["mode"],
"adom": module.params["adom"],
"youtube-channel-status": module.params["youtube_channel_status"],
"wisp-servers": module.params["wisp_servers"],
"wisp-algorithm": module.params["wisp_algorithm"],
"wisp": module.params["wisp"],
"web-url-log": module.params["web_url_log"],
"web-invalid-domain-log": module.params["web_invalid_domain_log"],
"web-ftgd-quota-usage": module.params["web_ftgd_quota_usage"],
"web-ftgd-err-log": module.params["web_ftgd_err_log"],
"web-filter-vbs-log": module.params["web_filter_vbs_log"],
"web-filter-unknown-log": module.params["web_filter_unknown_log"],
"web-filter-referer-log": module.params["web_filter_referer_log"],
"web-filter-jscript-log": module.params["web_filter_jscript_log"],
"web-filter-js-log": module.params["web_filter_js_log"],
"web-filter-cookie-removal-log": module.params["web_filter_cookie_removal_log"],
"web-filter-cookie-log": module.params["web_filter_cookie_log"],
"web-filter-command-block-log": module.params["web_filter_command_block_log"],
"web-filter-applet-log": module.params["web_filter_applet_log"],
"web-filter-activex-log": module.params["web_filter_activex_log"],
"web-extended-all-action-log": module.params["web_extended_all_action_log"],
"web-content-log": module.params["web_content_log"],
"replacemsg-group": module.params["replacemsg_group"],
"post-action": module.params["post_action"],
"ovrd-perm": module.params["ovrd_perm"],
"options": module.params["options"],
"name": module.params["name"],
"log-all-url": module.params["log_all_url"],
"inspection-mode": module.params["inspection_mode"],
"https-replacemsg": module.params["https_replacemsg"],
"extended-log": module.params["extended_log"],
"comment": module.params["comment"],
"ftgd-wf": {
"exempt-quota": module.params["ftgd_wf_exempt_quota"],
"max-quota-timeout": module.params["ftgd_wf_max_quota_timeout"],
"options": module.params["ftgd_wf_options"],
"ovrd": module.params["ftgd_wf_ovrd"],
"rate-crl-urls": module.params["ftgd_wf_rate_crl_urls"],
"rate-css-urls": module.params["ftgd_wf_rate_css_urls"],
"rate-image-urls": module.params["ftgd_wf_rate_image_urls"],
"rate-javascript-urls": module.params["ftgd_wf_rate_javascript_urls"],
"filters": {
"action": module.params["ftgd_wf_filters_action"],
"auth-usr-grp": module.params["ftgd_wf_filters_auth_usr_grp"],
"category": module.params["ftgd_wf_filters_category"],
"log": module.params["ftgd_wf_filters_log"],
"override-replacemsg": module.params["ftgd_wf_filters_override_replacemsg"],
"warn-duration": module.params["ftgd_wf_filters_warn_duration"],
"warning-duration-type": module.params["ftgd_wf_filters_warning_duration_type"],
"warning-prompt": module.params["ftgd_wf_filters_warning_prompt"],
},
"quota": {
"category": module.params["ftgd_wf_quota_category"],
"duration": module.params["ftgd_wf_quota_duration"],
"override-replacemsg": module.params["ftgd_wf_quota_override_replacemsg"],
"type": module.params["ftgd_wf_quota_type"],
"unit": module.params["ftgd_wf_quota_unit"],
"value": module.params["ftgd_wf_quota_value"],
},
},
"override": {
"ovrd-cookie": module.params["override_ovrd_cookie"],
"ovrd-dur": module.params["override_ovrd_dur"],
"ovrd-dur-mode": module.params["override_ovrd_dur_mode"],
"ovrd-scope": module.params["override_ovrd_scope"],
"ovrd-user-group": module.params["override_ovrd_user_group"],
"profile": module.params["override_profile"],
"profile-attribute": module.params["override_profile_attribute"],
"profile-type": module.params["override_profile_type"],
},
"url-extraction": {
"redirect-header": module.params["url_extraction_redirect_header"],
"redirect-no-content": module.params["url_extraction_redirect_no_content"],
"redirect-url": module.params["url_extraction_redirect_url"],
"server-fqdn": module.params["url_extraction_server_fqdn"],
"status": module.params["url_extraction_status"],
},
"web": {
"blacklist": module.params["web_blacklist"],
"bword-table": module.params["web_bword_table"],
"bword-threshold": module.params["web_bword_threshold"],
"content-header-list": module.params["web_content_header_list"],
"keyword-match": module.params["web_keyword_match"],
"log-search": module.params["web_log_search"],
"safe-search": module.params["web_safe_search"],
"urlfilter-table": module.params["web_urlfilter_table"],
"whitelist": module.params["web_whitelist"],
"youtube-restrict": module.params["web_youtube_restrict"],
},
"youtube-channel-filter": {
"channel-id": module.params["youtube_channel_filter_channel_id"],
"comment": module.params["youtube_channel_filter_comment"],
}
}
module.paramgram = paramgram
fmgr = None
if module._socket_path:
connection = Connection(module._socket_path)
fmgr = FortiManagerHandler(connection, module)
fmgr.tools = FMGRCommon()
else:
module.fail_json(**FAIL_SOCKET_MSG)
list_overrides = ['ftgd-wf', 'override', 'url-extraction', 'web', 'youtube-channel-filter']
paramgram = fmgr.tools.paramgram_child_list_override(list_overrides=list_overrides,
paramgram=paramgram, module=module)
results = DEFAULT_RESULT_OBJ
try:
results = fmgr_webfilter_profile_modify(fmgr, paramgram)
fmgr.govern_response(module=module, results=results,
ansible_facts=fmgr.construct_ansible_facts(results, module.params, paramgram))
except Exception as err:
raise FMGBaseException(err)
return module.exit_json(**results[1])
# Standard Ansible module entry point: run main() only when this file is
# executed directly, not when it is imported.
if __name__ == "__main__":
    main()
| gpl-3.0 | 5,868,044,743,491,713,000 | 39.332413 | 119 | 0.60695 | false | 4.128664 | false | false | false |
bamford/astrobamf | errors.py | 1 | 2901 | import numpy as N
def log10(x, xerr, mask=False):
    """Base-10 logarithm with first-order error propagation.

    Returns N.array([z, zerr]); when mask is True, additionally returns a
    boolean mask that is True where the input was strictly positive (i.e.
    where the logarithm is defined).
    """
    value = N.log10(x)
    # d(log10 x)/dx = log10(e) / x
    error = N.absolute(xerr * N.log10(N.e) / x)
    result = N.array([value, error])
    if mask:
        return result, x > 0.0
    return result
def pow10(x, xerr, mask=False):
    """Raise 10 to a power with first-order error propagation.

    z = 10**x, zerr = |ln(10) * xerr * z|.  Returns N.array([z, zerr]);
    when mask is True, additionally returns a boolean mask (all True, as
    pow10 is defined for every finite input).
    """
    z = 10**x
    zerr = N.absolute(N.log(10) * xerr * z)
    if not mask:
        return N.array([z, zerr])
    # FIX: the deprecated N.bool alias was removed in NumPy 1.20+; the
    # builtin bool dtype is equivalent.
    ok = N.ones(z.shape, bool)
    return N.array([z, zerr]), ok
def multiply(x, xerr, y, yerr, mask=False):
    """Multiply two quantities, propagating their uncertainties.

    Relative errors add in quadrature for a product:
    zerr = |z| * sqrt((xerr/x)**2 + (yerr/y)**2).  Returns
    N.array([z, zerr]); when mask is True, additionally returns a boolean
    validity mask (all True, matching the original behaviour).
    """
    z = x*y
    zerr = (xerr/x)**2 + (yerr/y)**2
    zerr = N.absolute(N.sqrt(zerr) * z)
    if not mask:
        return N.array([z, zerr])
    # FIX: the deprecated N.bool alias was removed in NumPy 1.20+; the
    # builtin bool dtype is equivalent.
    # NOTE(review): zerr is NaN where x or y is zero, yet the mask stays
    # True there — confirm whether callers rely on that.
    ok = N.ones(z.shape, bool)
    return N.array([z, zerr]), ok
def divide(x, xerr, y, yerr, mask=False):
    """Divide two quantities, propagating their uncertainties.

    Relative errors add in quadrature for a quotient:
    zerr = |z| * sqrt((xerr/x)**2 + (yerr/y)**2).  Returns
    N.array([z, zerr]); when mask is True, additionally returns a boolean
    mask that is True only where the result is well defined.
    """
    z = x/y
    zerr = (xerr/x)**2 + (yerr/y)**2
    zerr = N.absolute(N.sqrt(zerr) * z)
    if not mask:
        return N.array([z, zerr])
    # FIX: the original mask only checked x != 0 (which makes the relative
    # error blow up), but y == 0 makes the quotient itself undefined, so
    # both operands must be non-zero for a valid entry.
    ok = (x != 0.0) & (y != 0.0)
    return N.array([z, zerr]), ok
def add(x, xerr, y, yerr, mask=False):
    """Add two quantities, propagating their uncertainties.

    Absolute errors add in quadrature for a sum:
    zerr = sqrt(xerr**2 + yerr**2).  Returns N.array([z, zerr]); when mask
    is True, additionally returns a boolean mask (all True, as addition is
    always defined).
    """
    z = x+y
    # sum of squares is already non-negative, so no N.absolute needed
    zerr = N.sqrt(xerr**2 + yerr**2)
    if not mask:
        return N.array([z, zerr])
    # FIX: the deprecated N.bool alias was removed in NumPy 1.20+; the
    # builtin bool dtype is equivalent.
    ok = N.ones(z.shape, bool)
    return N.array([z, zerr]), ok
def subtract(x, xerr, y, yerr, mask=False):
    """Subtract two quantities, propagating their uncertainties.

    Absolute errors add in quadrature for a difference:
    zerr = sqrt(xerr**2 + yerr**2).  Returns N.array([z, zerr]); when mask
    is True, additionally returns a boolean mask (all True, as subtraction
    is always defined).
    """
    z = x-y
    # sum of squares is already non-negative, so no N.absolute needed
    zerr = N.sqrt(xerr**2 + yerr**2)
    if not mask:
        return N.array([z, zerr])
    # FIX: the deprecated N.bool alias was removed in NumPy 1.20+; the
    # builtin bool dtype is equivalent.
    ok = N.ones(z.shape, bool)
    return N.array([z, zerr]), ok
def test():
    """Monte Carlo sanity check of the error-propagation helpers.

    Draws normal samples around (a, aerr) and (b, berr), applies each
    helper to the corresponding numpy operation, and prints the relative
    deviation of the propagated value and error from the sample mean and
    standard deviation.  Output format matches the original:
    "<name> <delta> <deltaerr>".

    FIX: the original used Python-2-only print statements, which are a
    SyntaxError on Python 3; the '%'-formatted print() calls below work on
    both interpreters.
    """
    n = 100000
    a, aerr = 10.0, 2.0
    b, berr = 3.0, 0.3
    x = N.random.normal(a, aerr, n)
    y = N.random.normal(b, berr, n)

    def report(name, t, z, zerr):
        # relative deviation of the propagated value/error from the sample
        delta = N.absolute(t.mean() - z) / t.mean()
        deltaerr = N.absolute(t.std() - zerr) / t.std()
        print('%s %s %s' % (name, delta, deltaerr))

    # log10
    z, zerr = log10(a, aerr)
    report('log10', N.log10(x), z, zerr)
    # pow10
    z, zerr = pow10(b, berr)
    report('pow10', 10**y, z, zerr)
    # multiply
    z, zerr = multiply(a, aerr, b, berr)
    report('multiply', N.multiply(x, y), z, zerr)
    # divide
    z, zerr = divide(a, aerr, b, berr)
    report('divide', N.divide(x, y), z, zerr)
    # add
    z, zerr = add(a, aerr, b, berr)
    report('add', N.add(x, y), z, zerr)
    # subtract
    z, zerr = subtract(a, aerr, b, berr)
    report('subtract', N.subtract(x, y), z, zerr)
| mit | 4,019,453,284,759,647,700 | 26.628571 | 49 | 0.521889 | false | 2.580961 | false | false | false |
adam-rabinowitz/ngs_python | structure/unittest/alignedPairTest.py | 2 | 5114 | import unittest
import tempfile
import collections
import os
import multiprocessing
from ngs_python.structure import alignedPair
class PairTestCase(unittest.TestCase):
    """Shared fixture for aligned-pair tests: a temporary directory, the
    output pair-file path, example read pairs, and helpers to run
    alignedPair.processPairs in a child process."""

    def setUp(self):
        ''' Create temporary directory and example read pairs '''
        # Make temporary output path.
        # FIX: the original concatenated dirName and the file name without
        # a path separator, so the file was created *beside* the temporary
        # directory instead of inside it.
        self.dirName = tempfile.mkdtemp()
        self.testPair = os.path.join(self.dirName, 'test.pair')
        # Create read pairs: (chrom1, start1, end1, strand1,
        #                     chrom2, start2, end2, strand2)
        self.pair1 = ('chr1',1,40,'+','chr1',1960,2000,'-')
        self.pair2 = ('chr1',1,40,'+','chr1',1959,2001,'-')
        self.pair3 = ('chr1',1,40,'+','chr2',1959,1999,'-')
        self.pair4 = ('chr1',1,40,'+','chr1',1959,1999,'+')
        self.pair5 = ('chr1',100,140,'-','chr1',100,140,'+')
        self.pair6 = ('chr1',100,140,'-','chr1',90,130,'+')
        self.pair7 = ('chr1',100,140,'-','chr1',90,141,'+')
        self.pair8 = ('chr1',99,140,'-','chr1',100,130,'+')
        # Create pair list (with deliberate duplicates)
        self.pairList = ([self.pair2] + [self.pair3] * 2 + [self.pair4] * 3 +
            [self.pair5] + [self.pair6] * 2)

    def tearDown(self):
        ''' Remove temporary files and directories '''
        if os.path.isfile(self.testPair):
            os.remove(self.testPair)
        os.removedirs(self.dirName)

    def readFile(self):
        """Return the contents of the pair file as a list of tab-split rows."""
        with open(self.testPair) as f:
            data = f.readlines()
        output = [d.strip().split('\t') for d in data]
        return(output)

    def processPair(self, rmDup, rmConcord, maxSize):
        """Run alignedPair.processPairs in a child process, feed it
        self.pairList through a pipe, and return the metrics sent back."""
        pipes = multiprocessing.Pipe(True)
        process = multiprocessing.Process(
            target = alignedPair.processPairs,
            args = (pipes[0], self.testPair, rmDup, rmConcord, maxSize)
        )
        process.start()
        pipes[0].close()
        for pair in self.pairList:
            pipes[1].send(pair)
        # None is the end-of-stream sentinel expected by processPairs
        pipes[1].send(None)
        metrics = pipes[1].recv()
        pipes[1].close()
        process.join()
        return(metrics)
class TestPairProcessing(PairTestCase):
    """Tests for concordance detection and duplicate/concordant filtering."""

    def test_find_concordant(self):
        ''' Testing identification of concordant read pairs '''
        # Check proper pair
        self.assertTrue(alignedPair.concordant(self.pair1, 2000))
        # Check pair that is too big
        self.assertFalse(alignedPair.concordant(self.pair2, 2000))
        # Check pair on different chromosome
        self.assertFalse(alignedPair.concordant(self.pair3, 2000))
        # Check pair on same strand
        self.assertFalse(alignedPair.concordant(self.pair4, 2000))
        # Check overlapping proper pairs
        self.assertTrue(alignedPair.concordant(self.pair5, 2000))
        self.assertTrue(alignedPair.concordant(self.pair6, 2000))
        # Check when read pairs extend beyond each other
        self.assertFalse(alignedPair.concordant(self.pair7, 2000))
        self.assertFalse(alignedPair.concordant(self.pair8, 2000))

    def test_process_concord_duplication(self):
        ''' Test correct processing of concordant and duplicated reads '''
        def strPair(pair):
            # FIX: map() returns an iterator on Python 3, so wrap it in
            # list() to compare against the rows produced by readFile().
            return list(map(str, pair))
        # The metrics are identical for every filtering mode (filtering
        # only affects which pairs are written to the output file).
        expMetrics = collections.defaultdict(int, {
            'total': 9, 'unique': 5, 'duplicate': 4, 'concord': 3,
            'concorduni': 2, 'discord': 6, 'discorduni': 3})
        # Check processing with concordant and duplicates removed
        pairMetrics = self.processPair(rmDup = True, rmConcord = True,
            maxSize = 2000)
        self.assertEqual(self.readFile(), [strPair(self.pair2),
            strPair(self.pair3), strPair(self.pair4)])
        self.assertEqual(pairMetrics, expMetrics)
        # Check processing with duplicates removed
        pairMetrics = self.processPair(rmDup = True, rmConcord = False,
            maxSize = 2000)
        self.assertEqual(self.readFile(), [strPair(self.pair2),
            strPair(self.pair3), strPair(self.pair4), strPair(self.pair5),
            strPair(self.pair6)])
        self.assertEqual(pairMetrics, expMetrics)
        # Check processing with concordant removed
        pairMetrics = self.processPair(rmDup = False, rmConcord = True,
            maxSize = 2000)
        self.assertEqual(self.readFile(), [strPair(self.pair2)] +
            [strPair(self.pair3)] * 2 + [strPair(self.pair4)] * 3)
        self.assertEqual(pairMetrics, expMetrics)
        # Check processing with nothing removed
        pairMetrics = self.processPair(rmDup = False, rmConcord = False,
            maxSize = 2000)
        self.assertEqual(self.readFile(), [strPair(self.pair2)] +
            [strPair(self.pair3)] * 2 + [strPair(self.pair4)] * 3 +
            [strPair(self.pair5)] + [strPair(self.pair6)] * 2)
        self.assertEqual(pairMetrics, expMetrics)
# Build a suite from TestPairProcessing and run it with verbose output
# whenever this module is executed.
suite = unittest.TestLoader().loadTestsFromTestCase(TestPairProcessing)
unittest.TextTestRunner(verbosity=3).run(suite)
| gpl-2.0 | -3,708,043,065,017,505,300 | 43.086207 | 78 | 0.609894 | false | 3.553857 | true | false | false |
boundlessgeo/QGIS | python/console/console_settings.py | 32 | 28058 | # -*- coding:utf-8 -*-
"""
/***************************************************************************
Python Console for QGIS
-------------------
begin : 2012-09-10
copyright : (C) 2012 by Salvatore Larosa
email : lrssvtml (at) gmail (dot) com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
Some portions of code were taken from https://code.google.com/p/pydee/
"""
from qgis.PyQt.QtCore import QCoreApplication, QSize, Qt
from qgis.PyQt.QtWidgets import QDialog, QFileDialog, QMessageBox, QTableWidgetItem
from qgis.PyQt.QtGui import QIcon, QFont, QColor, QFontDatabase
from qgis.core import QgsSettings
from .console_compile_apis import PrepareAPIDialog
from .ui_console_settings import Ui_SettingsDialogPythonConsole
class optionsDialog(QDialog, Ui_SettingsDialogPythonConsole):
    """Settings dialog for the QGIS Python console and its code editor."""

    # Default syntax-highlighting colours (hex "#rrggbb" strings).  They
    # serve as fallbacks in restoreSettings() and are re-applied by the
    # _resetFontColor* handlers.
    DEFAULT_COLOR = "#4d4d4c"
    KEYWORD_COLOR = "#8959a8"
    CLASS_COLOR = "#4271ae"
    METHOD_COLOR = "#4271ae"
    DECORATION_COLOR = "#3e999f"
    NUMBER_COLOR = "#c82829"
    COMMENT_COLOR = "#8e908c"
    COMMENT_BLOCK_COLOR = "#8e908c"
    BACKGROUND_COLOR = "#ffffff"
    CURSOR_COLOR = "#636363"
    CARET_LINE_COLOR = "#efefef"
    SINGLE_QUOTE_COLOR = "#718c00"
    DOUBLE_QUOTE_COLOR = "#718c00"
    TRIPLE_SINGLE_QUOTE_COLOR = "#eab700"
    TRIPLE_DOUBLE_QUOTE_COLOR = "#eab700"
    MARGIN_BACKGROUND_COLOR = "#efefef"
    MARGIN_FOREGROUND_COLOR = "#636363"
    SELECTION_BACKGROUND_COLOR = "#d7d7d7"
    SELECTION_FOREGROUND_COLOR = "#303030"
    MATCHED_BRACE_BACKGROUND_COLOR = "#b7f907"
    MATCHED_BRACE_FOREGROUND_COLOR = "#303030"
    EDGE_COLOR = "#efefef"
    FOLD_COLOR = "#efefef"
    ERROR_COLOR = "#e31a1c"
    def __init__(self, parent):
        """Build the dialog, load persisted settings and wire the widgets.

        :param parent: parent widget passed through to QDialog
        """
        QDialog.__init__(self, parent)
        self.setWindowTitle(QCoreApplication.translate(
            "SettingsDialogPythonConsole", "Python Console Settings"))
        self.parent = parent
        self.setupUi(self)
        # paths of user API files, filled by saveSettings()
        self.listPath = []
        # the prepared-API path is chosen via dialogs, never typed
        self.lineEdit.setReadOnly(True)
        # load stored values first, then sync the enabled state of the
        # custom-API controls with the preload checkbox
        self.restoreSettings()
        self.initialCheck()
        self.addAPIpath.setIcon(QIcon(":/images/themes/default/symbologyAdd.svg"))
        self.addAPIpath.setToolTip(QCoreApplication.translate("PythonConsole", "Add API path"))
        self.removeAPIpath.setIcon(QIcon(":/images/themes/default/symbologyRemove.svg"))
        self.removeAPIpath.setToolTip(QCoreApplication.translate("PythonConsole", "Remove API path"))
        # signal wiring for the API management and colour-reset buttons
        self.preloadAPI.stateChanged.connect(self.initialCheck)
        self.addAPIpath.clicked.connect(self.loadAPIFile)
        self.removeAPIpath.clicked.connect(self.removeAPI)
        self.compileAPIs.clicked.connect(self._prepareAPI)
        self.resetFontColor.setIcon(QIcon(":/images/themes/default/mActionUndo.svg"))
        self.resetFontColor.setIconSize(QSize(18, 18))
        self.resetFontColorEditor.setIcon(QIcon(":/images/themes/default/mActionUndo.svg"))
        self.resetFontColorEditor.setIconSize(QSize(18, 18))
        self.resetFontColor.clicked.connect(self._resetFontColor)
        self.resetFontColorEditor.clicked.connect(self._resetFontColorEditor)
def initialCheck(self):
if self.preloadAPI.isChecked():
self.enableDisable(False)
else:
self.enableDisable(True)
def enableDisable(self, value):
self.tableWidget.setEnabled(value)
self.addAPIpath.setEnabled(value)
self.removeAPIpath.setEnabled(value)
self.groupBoxPreparedAPI.setEnabled(value)
def loadAPIFile(self):
settings = QgsSettings()
lastDirPath = settings.value("pythonConsole/lastDirAPIPath", "", type=str)
fileAPI, selected_filter = QFileDialog.getOpenFileName(
self, "Open API File", lastDirPath, "API file (*.api)")
if fileAPI:
self.addAPI(fileAPI)
settings.setValue("pythonConsole/lastDirAPIPath", fileAPI)
    def _prepareAPI(self):
        """Compile the listed API files into a single prepared (.pap) file.

        Asks for a destination, runs PrepareAPIDialog over every path in
        the table, and stores the resulting file path in the line edit.
        """
        if self.tableWidget.rowCount() != 0:
            # NOTE(review): '*.pap' is passed as the *directory* argument of
            # getSaveFileName and "" as the caption — looks like the
            # arguments are shifted; confirm intended behaviour.
            pap_file, filter = QFileDialog().getSaveFileName(
                self,
                "",
                '*.pap',
                "Prepared APIs file (*.pap)")
        else:
            QMessageBox.information(
                self, self.tr("Warning!"),
                self.tr('You need to add some APIs file in order to compile'))
            return
        if pap_file:
            api_lexer = 'QsciLexerPython'
            api_files = []
            count = self.tableWidget.rowCount()
            # column 1 holds the full path of each API file
            for i in range(0, count):
                api_files.append(self.tableWidget.item(i, 1).text())
            api_dlg = PrepareAPIDialog(api_lexer, api_files, pap_file, self)
            api_dlg.show()
            api_dlg.activateWindow()
            api_dlg.raise_()
            api_dlg.prepareAPI()
            self.lineEdit.setText(pap_file)
def accept(self):
if not self.preloadAPI.isChecked() and \
not self.groupBoxPreparedAPI.isChecked():
if self.tableWidget.rowCount() == 0:
QMessageBox.information(
self, self.tr("Warning!"),
self.tr('Please specify API file or check "Use preloaded API files"'))
return
if self.groupBoxPreparedAPI.isChecked() and \
not self.lineEdit.text():
QMessageBox.information(
self, self.tr("Warning!"),
QCoreApplication.translate('optionsDialog', 'The APIs file was not compiled, click on "Compile APIs…"')
)
return
self.saveSettings()
self.listPath = []
QDialog.accept(self)
def addAPI(self, pathAPI):
count = self.tableWidget.rowCount()
self.tableWidget.setColumnCount(2)
self.tableWidget.insertRow(count)
pathItem = QTableWidgetItem(pathAPI)
pathSplit = pathAPI.split("/")
apiName = pathSplit[-1][0:-4]
apiNameItem = QTableWidgetItem(apiName)
self.tableWidget.setItem(count, 0, apiNameItem)
self.tableWidget.setItem(count, 1, pathItem)
def removeAPI(self):
listItemSel = self.tableWidget.selectionModel().selectedRows()
for index in reversed(listItemSel):
self.tableWidget.removeRow(index.row())
    def saveSettings(self):
        """Persist every widget's state under the "pythonConsole/" keys.

        NOTE(review): the loop below appends the table rows to
        self.listPath without clearing it first; accept() resets the list
        afterwards — confirm no other caller depends on this.
        """
        settings = QgsSettings()
        # general console behaviour
        settings.setValue("pythonConsole/preloadAPI", self.preloadAPI.isChecked())
        settings.setValue("pythonConsole/autoSaveScript", self.autoSaveScript.isChecked())
        # fonts for console and editor
        fontFamilyText = self.fontComboBox.currentText()
        settings.setValue("pythonConsole/fontfamilytext", fontFamilyText)
        fontFamilyTextEditor = self.fontComboBoxEditor.currentText()
        settings.setValue("pythonConsole/fontfamilytextEditor", fontFamilyTextEditor)
        fontSize = self.spinBox.value()
        fontSizeEditor = self.spinBoxEditor.value()
        # collect the user API paths from column 1 of the table
        for i in range(0, self.tableWidget.rowCount()):
            text = self.tableWidget.item(i, 1).text()
            self.listPath.append(text)
        settings.setValue("pythonConsole/fontsize", fontSize)
        settings.setValue("pythonConsole/fontsizeEditor", fontSizeEditor)
        settings.setValue("pythonConsole/userAPI", self.listPath)
        # auto-completion configuration
        settings.setValue("pythonConsole/autoCompThreshold", self.autoCompThreshold.value())
        settings.setValue("pythonConsole/autoCompThresholdEditor", self.autoCompThresholdEditor.value())
        settings.setValue("pythonConsole/autoCompleteEnabledEditor", self.groupBoxAutoCompletionEditor.isChecked())
        settings.setValue("pythonConsole/autoCompleteEnabled", self.groupBoxAutoCompletion.isChecked())
        settings.setValue("pythonConsole/usePreparedAPIFile", self.groupBoxPreparedAPI.isChecked())
        settings.setValue("pythonConsole/preparedAPIFile", self.lineEdit.text())
        # completion source: mutually exclusive radio buttons
        if self.autoCompFromAPIEditor.isChecked():
            settings.setValue("pythonConsole/autoCompleteSourceEditor", 'fromAPI')
        elif self.autoCompFromDocEditor.isChecked():
            settings.setValue("pythonConsole/autoCompleteSourceEditor", 'fromDoc')
        elif self.autoCompFromDocAPIEditor.isChecked():
            settings.setValue("pythonConsole/autoCompleteSourceEditor", 'fromDocAPI')
        if self.autoCompFromAPI.isChecked():
            settings.setValue("pythonConsole/autoCompleteSource", 'fromAPI')
        elif self.autoCompFromDoc.isChecked():
            settings.setValue("pythonConsole/autoCompleteSource", 'fromDoc')
        elif self.autoCompFromDocAPI.isChecked():
            settings.setValue("pythonConsole/autoCompleteSource", 'fromDocAPI')
        settings.setValue("pythonConsole/enableObjectInsp", self.enableObjectInspector.isChecked())
        settings.setValue("pythonConsole/autoCloseBracket", self.autoCloseBracket.isChecked())
        settings.setValue("pythonConsole/autoCloseBracketEditor", self.autoCloseBracketEditor.isChecked())
        settings.setValue("pythonConsole/autoInsertionImport", self.autoInsertionImport.isChecked())
        settings.setValue("pythonConsole/autoInsertionImportEditor", self.autoInsertionImportEditor.isChecked())
        # syntax-highlighting colours (console and editor variants)
        settings.setValue("pythonConsole/defaultFontColor", self.defaultFontColor.color())
        settings.setValue("pythonConsole/defaultFontColorEditor", self.defaultFontColorEditor.color())
        settings.setValue("pythonConsole/classFontColor", self.classFontColor.color())
        settings.setValue("pythonConsole/classFontColorEditor", self.classFontColorEditor.color())
        settings.setValue("pythonConsole/keywordFontColor", self.keywordFontColor.color())
        settings.setValue("pythonConsole/keywordFontColorEditor", self.keywordFontColorEditor.color())
        settings.setValue("pythonConsole/decorFontColor", self.decorFontColor.color())
        settings.setValue("pythonConsole/decorFontColorEditor", self.decorFontColorEditor.color())
        settings.setValue("pythonConsole/numberFontColor", self.numberFontColor.color())
        settings.setValue("pythonConsole/numberFontColorEditor", self.numberFontColorEditor.color())
        settings.setValue("pythonConsole/methodFontColor", self.methodFontColor.color())
        settings.setValue("pythonConsole/methodFontColorEditor", self.methodFontColorEditor.color())
        settings.setValue("pythonConsole/commentFontColor", self.commentFontColor.color())
        settings.setValue("pythonConsole/commentFontColorEditor", self.commentFontColorEditor.color())
        settings.setValue("pythonConsole/commentBlockFontColor", self.commentBlockFontColor.color())
        settings.setValue("pythonConsole/commentBlockFontColorEditor", self.commentBlockFontColorEditor.color())
        settings.setValue("pythonConsole/paperBackgroundColor", self.paperBackgroundColor.color())
        settings.setValue("pythonConsole/paperBackgroundColorEditor", self.paperBackgroundColorEditor.color())
        settings.setValue("pythonConsole/cursorColor", self.cursorColor.color())
        settings.setValue("pythonConsole/cursorColorEditor", self.cursorColorEditor.color())
        settings.setValue("pythonConsole/caretLineColor", self.caretLineColor.color())
        settings.setValue("pythonConsole/caretLineColorEditor", self.caretLineColorEditor.color())
        settings.setValue("pythonConsole/stderrFontColor", self.stderrFontColor.color())
        settings.setValue("pythonConsole/singleQuoteFontColor", self.singleQuoteFontColor.color())
        settings.setValue("pythonConsole/singleQuoteFontColorEditor", self.singleQuoteFontColorEditor.color())
        settings.setValue("pythonConsole/doubleQuoteFontColor", self.doubleQuoteFontColor.color())
        settings.setValue("pythonConsole/doubleQuoteFontColorEditor", self.doubleQuoteFontColorEditor.color())
        settings.setValue("pythonConsole/tripleSingleQuoteFontColor", self.tripleSingleQuoteFontColor.color())
        settings.setValue("pythonConsole/tripleSingleQuoteFontColorEditor",
                          self.tripleSingleQuoteFontColorEditor.color())
        settings.setValue("pythonConsole/tripleDoubleQuoteFontColor", self.tripleDoubleQuoteFontColor.color())
        settings.setValue("pythonConsole/tripleDoubleQuoteFontColorEditor",
                          self.tripleDoubleQuoteFontColorEditor.color())
        settings.setValue("pythonConsole/edgeColorEditor", self.edgeColorEditor.color())
        settings.setValue("pythonConsole/marginBackgroundColor", self.marginBackgroundColor.color())
        settings.setValue("pythonConsole/marginBackgroundColorEditor", self.marginBackgroundColorEditor.color())
        settings.setValue("pythonConsole/marginForegroundColor", self.marginForegroundColor.color())
        settings.setValue("pythonConsole/marginForegroundColorEditor", self.marginForegroundColorEditor.color())
        settings.setValue("pythonConsole/foldColorEditor", self.foldColorEditor.color())
        settings.setValue("pythonConsole/selectionBackgroundColor", self.selectionBackgroundColor.color())
        settings.setValue("pythonConsole/selectionBackgroundColorEditor", self.selectionBackgroundColorEditor.color())
        settings.setValue("pythonConsole/selectionForegroundColor", self.selectionForegroundColor.color())
        settings.setValue("pythonConsole/selectionForegroundColorEditor", self.selectionForegroundColorEditor.color())
        settings.setValue("pythonConsole/matchedBraceBackgroundColor", self.matchedBraceBackgroundColor.color())
        settings.setValue("pythonConsole/matchedBraceBackgroundColorEditor", self.matchedBraceBackgroundColorEditor.color())
        settings.setValue("pythonConsole/matchedBraceForegroundColor", self.matchedBraceForegroundColor.color())
        settings.setValue("pythonConsole/matchedBraceForegroundColorEditor", self.matchedBraceForegroundColorEditor.color())
    def restoreSettings(self):
        """Populate every widget from the stored "pythonConsole/" settings.

        Missing keys fall back to the system fixed-width font and the
        class-level default colour constants.
        """
        settings = QgsSettings()
        # fonts: default to the system fixed-width font
        font = QFontDatabase.systemFont(QFontDatabase.FixedFont)
        self.spinBox.setValue(settings.value("pythonConsole/fontsize", font.pointSize(), type=int))
        self.spinBoxEditor.setValue(settings.value("pythonConsole/fontsizeEditor", font.pointSize(), type=int))
        self.fontComboBox.setCurrentFont(QFont(settings.value("pythonConsole/fontfamilytext",
                                                              font.family())))
        self.fontComboBoxEditor.setCurrentFont(QFont(settings.value("pythonConsole/fontfamilytextEditor",
                                                                    font.family())))
        self.preloadAPI.setChecked(settings.value("pythonConsole/preloadAPI", True, type=bool))
        self.lineEdit.setText(settings.value("pythonConsole/preparedAPIFile", "", type=str))
        # rebuild the user API table: (name, full path) per row
        itemTable = settings.value("pythonConsole/userAPI", [])
        if itemTable:
            self.tableWidget.setRowCount(0)
            for i in range(len(itemTable)):
                self.tableWidget.insertRow(i)
                self.tableWidget.setColumnCount(2)
                pathSplit = itemTable[i].split("/")
                apiName = pathSplit[-1][0:-4]
                self.tableWidget.setItem(i, 0, QTableWidgetItem(apiName))
                self.tableWidget.setItem(i, 1, QTableWidgetItem(itemTable[i]))
        self.autoSaveScript.setChecked(settings.value("pythonConsole/autoSaveScript", False, type=bool))
        # auto-completion configuration
        self.autoCompThreshold.setValue(settings.value("pythonConsole/autoCompThreshold", 2, type=int))
        self.autoCompThresholdEditor.setValue(settings.value("pythonConsole/autoCompThresholdEditor", 2, type=int))
        self.groupBoxAutoCompletionEditor.setChecked(
            settings.value("pythonConsole/autoCompleteEnabledEditor", True, type=bool))
        self.groupBoxAutoCompletion.setChecked(settings.value("pythonConsole/autoCompleteEnabled", True, type=bool))
        self.enableObjectInspector.setChecked(settings.value("pythonConsole/enableObjectInsp", False, type=bool))
        self.autoCloseBracketEditor.setChecked(settings.value("pythonConsole/autoCloseBracketEditor", False, type=bool))
        self.autoCloseBracket.setChecked(settings.value("pythonConsole/autoCloseBracket", False, type=bool))
        self.autoInsertionImportEditor.setChecked(
            settings.value("pythonConsole/autoInsertionImportEditor", True, type=bool))
        self.autoInsertionImport.setChecked(settings.value("pythonConsole/autoInsertionImport", True, type=bool))
        # completion source: select the matching radio button
        if settings.value("pythonConsole/autoCompleteSource") == 'fromDoc':
            self.autoCompFromDoc.setChecked(True)
        elif settings.value("pythonConsole/autoCompleteSource") == 'fromAPI':
            self.autoCompFromAPI.setChecked(True)
        elif settings.value("pythonConsole/autoCompleteSource") == 'fromDocAPI':
            self.autoCompFromDocAPI.setChecked(True)
        if settings.value("pythonConsole/autoCompleteSourceEditor") == 'fromDoc':
            self.autoCompFromDocEditor.setChecked(True)
        elif settings.value("pythonConsole/autoCompleteSourceEditor") == 'fromAPI':
            self.autoCompFromAPIEditor.setChecked(True)
        elif settings.value("pythonConsole/autoCompleteSourceEditor") == 'fromDocAPI':
            self.autoCompFromDocAPIEditor.setChecked(True)
        # Setting font lexer color (class constants are the fallbacks)
        self.defaultFontColor.setColor(QColor(settings.value("pythonConsole/defaultFontColor", QColor(self.DEFAULT_COLOR))))
        self.defaultFontColorEditor.setColor(
            QColor(settings.value("pythonConsole/defaultFontColorEditor", QColor(self.DEFAULT_COLOR))))
        self.keywordFontColor.setColor(QColor(settings.value("pythonConsole/keywordFontColor", QColor(self.KEYWORD_COLOR))))
        self.keywordFontColorEditor.setColor(
            QColor(settings.value("pythonConsole/keywordFontColorEditor", QColor(self.KEYWORD_COLOR))))
        self.classFontColor.setColor(QColor(settings.value("pythonConsole/classFontColor", QColor(self.CLASS_COLOR))))
        self.classFontColorEditor.setColor(
            QColor(settings.value("pythonConsole/classFontColorEditor", QColor(self.CLASS_COLOR))))
        self.methodFontColor.setColor(QColor(settings.value("pythonConsole/methodFontColor", QColor(self.METHOD_COLOR))))
        self.methodFontColorEditor.setColor(
            QColor(settings.value("pythonConsole/methodFontColorEditor", QColor(self.METHOD_COLOR))))
        self.decorFontColor.setColor(QColor(settings.value("pythonConsole/decorFontColor", QColor(self.DECORATION_COLOR))))
        self.decorFontColorEditor.setColor(
            QColor(settings.value("pythonConsole/decorFontColorEditor", QColor(self.DECORATION_COLOR))))
        self.numberFontColor.setColor(QColor(settings.value("pythonConsole/numberFontColor", QColor(self.NUMBER_COLOR))))
        self.numberFontColorEditor.setColor(
            QColor(settings.value("pythonConsole/numberFontColorEditor", QColor(self.NUMBER_COLOR))))
        self.commentFontColor.setColor(QColor(settings.value("pythonConsole/commentFontColor", QColor(self.COMMENT_COLOR))))
        self.commentFontColorEditor.setColor(
            QColor(settings.value("pythonConsole/commentFontColorEditor", QColor(self.COMMENT_COLOR))))
        self.commentBlockFontColor.setColor(
            QColor(settings.value("pythonConsole/commentBlockFontColor", QColor(self.COMMENT_BLOCK_COLOR))))
        self.commentBlockFontColorEditor.setColor(
            QColor(settings.value("pythonConsole/commentBlockFontColorEditor", QColor(self.COMMENT_BLOCK_COLOR))))
        self.paperBackgroundColor.setColor(
            QColor(settings.value("pythonConsole/paperBackgroundColor", QColor(self.BACKGROUND_COLOR))))
        self.paperBackgroundColorEditor.setColor(
            QColor(settings.value("pythonConsole/paperBackgroundColorEditor", QColor(self.BACKGROUND_COLOR))))
        self.caretLineColor.setColor(QColor(settings.value("pythonConsole/caretLineColor", QColor(self.CARET_LINE_COLOR))))
        self.caretLineColorEditor.setColor(
            QColor(settings.value("pythonConsole/caretLineColorEditor", QColor(self.CARET_LINE_COLOR))))
        self.cursorColor.setColor(QColor(settings.value("pythonConsole/cursorColor", QColor(self.CURSOR_COLOR))))
        self.cursorColorEditor.setColor(QColor(settings.value("pythonConsole/cursorColorEditor", QColor(self.CURSOR_COLOR))))
        self.singleQuoteFontColor.setColor(settings.value("pythonConsole/singleQuoteFontColor", QColor(self.SINGLE_QUOTE_COLOR)))
        self.singleQuoteFontColorEditor.setColor(
            settings.value("pythonConsole/singleQuoteFontColorEditor", QColor(self.SINGLE_QUOTE_COLOR)))
        self.doubleQuoteFontColor.setColor(settings.value("pythonConsole/doubleQuoteFontColor", QColor(self.DOUBLE_QUOTE_COLOR)))
        self.doubleQuoteFontColorEditor.setColor(
            settings.value("pythonConsole/doubleQuoteFontColorEditor", QColor(self.DOUBLE_QUOTE_COLOR)))
        self.tripleSingleQuoteFontColor.setColor(
            settings.value("pythonConsole/tripleSingleQuoteFontColor", QColor(self.TRIPLE_SINGLE_QUOTE_COLOR)))
        self.tripleSingleQuoteFontColorEditor.setColor(
            settings.value("pythonConsole/tripleSingleQuoteFontColorEditor", QColor(self.TRIPLE_SINGLE_QUOTE_COLOR)))
        self.tripleDoubleQuoteFontColor.setColor(
            settings.value("pythonConsole/tripleDoubleQuoteFontColor", QColor(self.TRIPLE_DOUBLE_QUOTE_COLOR)))
        self.tripleDoubleQuoteFontColorEditor.setColor(
            settings.value("pythonConsole/tripleDoubleQuoteFontColorEditor", QColor(self.TRIPLE_DOUBLE_QUOTE_COLOR)))
        self.marginBackgroundColor.setColor(settings.value("pythonConsole/marginBackgroundColor", QColor(self.MARGIN_BACKGROUND_COLOR)))
        self.marginBackgroundColorEditor.setColor(settings.value("pythonConsole/marginBackgroundColorEditor", QColor(self.MARGIN_BACKGROUND_COLOR)))
        self.marginForegroundColor.setColor(settings.value("pythonConsole/marginForegroundColor", QColor(self.MARGIN_FOREGROUND_COLOR)))
        self.marginForegroundColorEditor.setColor(settings.value("pythonConsole/marginForegroundColorEditor", QColor(self.MARGIN_FOREGROUND_COLOR)))
        self.selectionForegroundColor.setColor(settings.value("pythonConsole/selectionForegroundColor", QColor(self.SELECTION_FOREGROUND_COLOR)))
        self.selectionForegroundColorEditor.setColor(settings.value("pythonConsole/selectionForegroundColorEditor", QColor(self.SELECTION_FOREGROUND_COLOR)))
        self.selectionBackgroundColor.setColor(settings.value("pythonConsole/selectionBackgroundColor", QColor(self.SELECTION_BACKGROUND_COLOR)))
        self.selectionBackgroundColorEditor.setColor(settings.value("pythonConsole/selectionBackgroundColorEditor", QColor(self.SELECTION_BACKGROUND_COLOR)))
        self.matchedBraceForegroundColor.setColor(settings.value("pythonConsole/matchedBraceForegroundColor", QColor(self.MATCHED_BRACE_FOREGROUND_COLOR)))
        self.matchedBraceForegroundColorEditor.setColor(settings.value("pythonConsole/matchedBraceForegroundColorEditor", QColor(self.MATCHED_BRACE_FOREGROUND_COLOR)))
        self.matchedBraceBackgroundColor.setColor(settings.value("pythonConsole/matchedBraceBackgroundColor", QColor(self.MATCHED_BRACE_BACKGROUND_COLOR)))
        self.matchedBraceBackgroundColorEditor.setColor(settings.value("pythonConsole/matchedBraceBackgroundColorEditor", QColor(self.MATCHED_BRACE_BACKGROUND_COLOR)))
        self.stderrFontColor.setColor(QColor(settings.value("pythonConsole/stderrFontColor", QColor(self.ERROR_COLOR))))
        self.edgeColorEditor.setColor(settings.value("pythonConsole/edgeColorEditor", QColor(self.EDGE_COLOR)))
        self.foldColorEditor.setColor(settings.value("pythonConsole/foldColorEditor", QColor(self.FOLD_COLOR)))
def _resetFontColor(self):
self.defaultFontColor.setColor(QColor(self.DEFAULT_COLOR))
self.keywordFontColor.setColor(QColor(self.KEYWORD_COLOR))
self.classFontColor.setColor(QColor(self.CLASS_COLOR))
self.methodFontColor.setColor(QColor(self.METHOD_COLOR))
self.decorFontColor.setColor(QColor(self.DECORATION_COLOR))
self.numberFontColor.setColor(QColor(self.NUMBER_COLOR))
self.commentFontColor.setColor(QColor(self.COMMENT_COLOR))
self.commentBlockFontColor.setColor(QColor(self.COMMENT_BLOCK_COLOR))
self.paperBackgroundColor.setColor(QColor(self.BACKGROUND_COLOR))
self.cursorColor.setColor(QColor(self.CURSOR_COLOR))
self.caretLineColor.setColor(QColor(self.CARET_LINE_COLOR))
self.singleQuoteFontColor.setColor(QColor(self.SINGLE_QUOTE_COLOR))
self.doubleQuoteFontColor.setColor(QColor(self.DOUBLE_QUOTE_COLOR))
self.tripleSingleQuoteFontColor.setColor(QColor(self.TRIPLE_SINGLE_QUOTE_COLOR))
self.tripleDoubleQuoteFontColor.setColor(QColor(self.TRIPLE_DOUBLE_QUOTE_COLOR))
self.marginBackgroundColor.setColor(QColor(self.MARGIN_BACKGROUND_COLOR))
self.marginForegroundColor.setColor(QColor(self.MARGIN_FOREGROUND_COLOR))
self.selectionBackgroundColor.setColor(QColor(self.SELECTION_BACKGROUND_COLOR))
self.selectionForegroundColor.setColor(QColor(self.SELECTION_FOREGROUND_COLOR))
self.matchedBraceBackgroundColor.setColor(QColor(self.MATCHED_BRACE_BACKGROUND_COLOR))
self.matchedBraceForegroundColor.setColor(QColor(self.MATCHED_BRACE_FOREGROUND_COLOR))
self.stderrFontColor.setColor(QColor(self.ERROR_COLOR))
def _resetFontColorEditor(self):
self.defaultFontColorEditor.setColor(QColor(self.DEFAULT_COLOR))
self.keywordFontColorEditor.setColor(QColor(self.KEYWORD_COLOR))
self.classFontColorEditor.setColor(QColor(self.CLASS_COLOR))
self.methodFontColorEditor.setColor(QColor(self.METHOD_COLOR))
self.decorFontColorEditor.setColor(QColor(self.DECORATION_COLOR))
self.numberFontColorEditor.setColor(QColor(self.NUMBER_COLOR))
self.commentFontColorEditor.setColor(QColor(self.COMMENT_COLOR))
self.commentBlockFontColorEditor.setColor(QColor(self.COMMENT_BLOCK_COLOR))
self.paperBackgroundColorEditor.setColor(QColor(self.BACKGROUND_COLOR))
self.cursorColorEditor.setColor(QColor(self.CURSOR_COLOR))
self.caretLineColorEditor.setColor(QColor(self.CARET_LINE_COLOR))
self.singleQuoteFontColorEditor.setColor(QColor(self.SINGLE_QUOTE_COLOR))
self.doubleQuoteFontColorEditor.setColor(QColor(self.DOUBLE_QUOTE_COLOR))
self.tripleSingleQuoteFontColorEditor.setColor(QColor(self.TRIPLE_SINGLE_QUOTE_COLOR))
self.tripleDoubleQuoteFontColorEditor.setColor(QColor(self.TRIPLE_DOUBLE_QUOTE_COLOR))
self.marginBackgroundColorEditor.setColor(QColor(self.MARGIN_BACKGROUND_COLOR))
self.marginForegroundColorEditor.setColor(QColor(self.MARGIN_FOREGROUND_COLOR))
self.selectionBackgroundColorEditor.setColor(QColor(self.SELECTION_BACKGROUND_COLOR))
self.selectionForegroundColorEditor.setColor(QColor(self.SELECTION_FOREGROUND_COLOR))
self.matchedBraceBackgroundColorEditor.setColor(QColor(self.MATCHED_BRACE_BACKGROUND_COLOR))
self.matchedBraceForegroundColorEditor.setColor(QColor(self.MATCHED_BRACE_FOREGROUND_COLOR))
self.edgeColorEditor.setColor(QColor(self.EDGE_COLOR))
self.foldColorEditor.setColor(QColor(self.FOLD_COLOR))
    def reject(self):
        """Cancel the dialog: restore the previously saved settings
        (discarding any edits made in the dialog) and close via the base
        QDialog implementation."""
        self.restoreSettings()
        QDialog.reject(self)
| gpl-2.0 | 4,391,739,771,873,995,000 | 63.645161 | 167 | 0.715818 | false | 4.255422 | false | false | false |
zeqing-guo/SPAKeyManager | MergeServer/views.py | 1 | 2698 | import time
from django.shortcuts import render
from django.http import HttpResponse, Http404
from MergeServer.models import Results
from KeyManager.Util.Paillier import mul, add
from KeyManager.Util.TimeCost import SPATime
from CommunicationServer.models import Transaction
# Create your views here.
def get_merge_response(request):
    """Merge one respondent's SPA answers into the requester's stored results.

    Expects a POST whose body carries comma-separated ``values`` (one entry
    per SPA policy), the ``requester``/``respondent`` identifiers, the
    Paillier public-key parts (``paillier_n``, ``paillier_g``), the
    comma-separated ``spa_policies`` list and the ``settings`` blob.
    Encrypted values are combined with the homomorphic ``add`` modulo n^2.

    Returns ``HttpResponse('ok')`` on success; raises ``Http404`` for any
    non-POST request.
    """
    start = time.time()
    if request.method != 'POST':
        # BUG FIX: the original `return Http404` handed back the exception
        # *class* instead of an HTTP response; Http404 must be raised.
        raise Http404
    values = request.POST['values'].split(',')
    requester = request.POST['requester']
    requester_number = long(requester)
    # Parsed but currently unused in the merge itself; kept for protocol
    # completeness — TODO confirm whether logging of these is intended.
    respondent = request.POST['respondent']
    paillier_n = long(request.POST['paillier_n'])
    # Paillier ciphertext arithmetic is performed modulo n^2.
    n_square = paillier_n * paillier_n
    paillier_g = long(request.POST['paillier_g'])
    spa_policies = request.POST['spa_policies'].split(',')
    settings = request.POST['settings']
    size = len(spa_policies)
    results = Results.objects.filter(requester=requester_number)
    if results.exists():
        # A partial aggregate already exists: fold this response in,
        # policy by policy.
        result = results[0].values.split(',')
        count = results[0].count
        print("count: " + str(count))
        new_result = []
        for i in range(size):
            if spa_policies[i] == 'Average':
                # Entry format "<encrypted sum> <weight>": sums are added
                # homomorphically, weights are added in the clear.
                [this_value, this_weight] = result[i].split(' ')
                [sum_value, sum_weight] = values[i].split(' ')
                this_result = str(add(long(this_value), long(sum_value), n_square)) \
                    + ' ' + str(long(this_weight) + long(sum_weight))
                new_result.append(this_result)
            if spa_policies[i] == 'MaximumValue' or spa_policies[i] == 'MinimumValue':
                # Min/max cannot be computed under encryption; accumulate the
                # ciphertexts so the requester can decrypt and compare.
                this_result = result[i] + ' ' + values[i]
                new_result.append(this_result)
            if spa_policies[i] == 'MajorityPreferred' or spa_policies[i] == 'MinorityPreferred':
                # Element-wise homomorphic addition of the encrypted tally vector.
                this_result = []
                this_values = values[i].split(' ')
                result_element = result[i].split(' ')
                this_size = len(result_element)
                for j in range(this_size):
                    ans = add(long(this_values[j]), long(result_element[j]), n_square)
                    this_result.append(str(ans))
                new_result.append(' '.join(this_result))
        results.update(values=','.join(new_result), count=count + 1)
        end = time.time()
        SPATime(end - start)
    else:
        # First response for this requester: store it verbatim as the baseline.
        r = Results(values=request.POST['values'],
                    requester=requester_number,
                    spa_policies=request.POST['spa_policies'],
                    n_square=n_square,
                    settings=settings)
        r.save()
        end = time.time()
        SPATime(end - start)
    return HttpResponse('ok')
| gpl-3.0 | -5,128,217,341,293,719,000 | 38.676471 | 96 | 0.575982 | false | 3.843305 | false | false | false |
ludwig-n/fchess | old_code.py | 1 | 21297 | # def comet(mvs, pcs, recdepth, maxrecdepth):
# options = []
# curxp = xp_diff(mvs[0][0].color, pcs)
# intro = '// ' + ('-' * (recdepth * 2)) + ' COMET: '
# if DEBUG_OUTPUT:
# print(intro + '{} opts'.format(len(mvs)))
# else:
# print(' ' * recdepth + '*')
# if len(mvs) == 1:
# tmp, xp, oma = can_move_piece(mvs[0][0], mvs[0][1][0], mvs[0][1][1], pcs, aimode=True)
# if DEBUG_OUTPUT:
# print(intro + 'one opt ({} -> {}) - xp {}'.format(num_to_chess_coord(
# mvs[0][0].x,
# mvs[0][0].y),
# num_to_chess_coord(
# mvs[0][1][0],
# mvs[0][1][1]),
# xp))
# return 0, xp
# if recdepth == maxrecdepth:
# if DEBUG_OUTPUT:
# print(intro + 'unable to continue analysis; maximum recursion depth exceeded;'
# ' xp set to current xp ({})'.format(curxp))
# return 0, curxp
# for m in mvs:
# if m[0].type != KING:
# break
# else:
# # Check if possible to check or checkmate
# for n in range(len(mvs)):
# tmp, xp, oma = can_move_piece(mvs[n][0], mvs[n][1][0], mvs[n][1][1], pcs, aimode=True)
# if type(oma) == bool and oma:
# return n, xp
#
# for n in range(len(mvs)):
# tmp, xp, oma = can_move_piece(mvs[n][0], mvs[n][1][0], mvs[n][1][1], pcs, aimode=True)
# if xp - curxp >= 2:
# return n, xp
# # / Check if possible to check or checkmate /
# myking = m[0]
# for p in pcs:
# if p.color != myking.color and p.type == KING:
# otherking = p
# if myking.resist >= 6:
# mv = select_move_toward(myking, otherking, mvs)
# else:
# mv = select_move_away(myking, otherking, mvs)
# a, x, c = can_move_piece(myking, mvs[mv][1][0], mvs[mv][1][1], pcs, aimode=True)
# return mv, x
# for i in range(len(mvs)):
# tmp, xp, oma = can_move_piece(mvs[i][0], mvs[i][1][0], mvs[i][1][1], pcs, aimode=True)
# if type(oma) == bool and oma:
# turn = i
# if DEBUG_OUTPUT:
# print(intro + 'opt {} ({} -> {}) leads to victory, xp counted as inf; exiting analysis'.format(i,
# num_to_chess_coord(
# mvs[i][0].x,
# mvs[i][0].y),
# num_to_chess_coord(
# mvs[i][1][0],
# mvs[i][1][1])))
# xp = INF
# return i, INF
#
# elif type(oma) != bool and len(oma) == 1:
# bpcs = []
# for p in pcs:
# bpcs.append(copy.deepcopy(p))
#
# if DEBUG_OUTPUT:
# print(intro + 'analyzing our opt {} ({} -> {})...'.format(i,
# num_to_chess_coord(
# mvs[i][0].x,
# mvs[i][0].y),
# num_to_chess_coord(
# mvs[i][1][0],
# mvs[i][1][1])))
#
# move_piece(mvs[i][0], mvs[i][1][0], mvs[i][1][1], bpcs)
# if DEBUG_OUTPUT:
# print(intro + 'fc; one opponent opt ({} -> {})'.format(num_to_chess_coord(
# oma[0][0].x,
# oma[0][0].y),
# num_to_chess_coord(
# oma[0][1][0],
# oma[0][1][1]
# )))
# move_piece(oma[0][0], oma[0][1][0], oma[0][1][1], bpcs)
#
# newmv = get_all_moves(mvs[0][0].color, bpcs)
# if type(newmv) != bool:
# tmptmp, xp = comet(newmv, bpcs, recdepth + 1, maxrecdepth)# if maxrecdepth - recdepth >= 1 else -1, curxp
# if DEBUG_OUTPUT:
# print(intro + 'analysis of opt {} finished; xp {}'.format(i, xp))
# if xp == INF:
# if DEBUG_OUTPUT:
# print(intro + 'checkmate detected, exiting analysis')
# return i, INF
# else:
# if DEBUG_OUTPUT:
# print(intro + 'opt {} leads to defeat/stalemate, xp counted as -inf'.format(i))
# xp = -INF
#
# elif type(oma) != bool and get_piece_by_coords(oma[0][1][0], oma[0][1][1], pcs) is not None:
# bpcs = []
# for p in pcs:
# bpcs.append(copy.deepcopy(p))
#
# if DEBUG_OUTPUT:
# print(intro + 'analyzing opt {} ({} -> {})...'.format(i,
# num_to_chess_coord(
# mvs[i][0].x,
# mvs[i][0].y),
# num_to_chess_coord(
# mvs[i][1][0],
# mvs[i][1][1])))
#
# move_piece(mvs[i][0], mvs[i][1][0], mvs[i][1][1], bpcs)
# if DEBUG_OUTPUT:
# print(intro + 'fc; {} opponent opts'.format(len(oma)))
# xps = []
# for q in range(len(oma)):
# nbpcs = []
# for p in bpcs:
# nbpcs.append(copy.deepcopy(p))
# if DEBUG_OUTPUT:
# print(intro + 'analyzing opponent opt {} ({} -> {})'.format(q, num_to_chess_coord(
# oma[0][0].x,
# oma[0][0].y),
# num_to_chess_coord(
# oma[0][1][0],
# oma[0][1][1]
# )))
# move_piece(oma[q][0], oma[q][1][0], oma[q][1][1], nbpcs)
#
# newmv = get_all_moves(mvs[0][0].color, nbpcs)
# if type(newmv) != bool:
# if maxrecdepth - recdepth >= 1:
# t, xpn = comet(newmv, nbpcs, recdepth + 1, maxrecdepth)
# if DEBUG_OUTPUT:
# print(intro + 'analysis of opponent opt {} finished; xp {}'.format(q, xpn))
# else:
# xpn = curxp
# if DEBUG_OUTPUT:
# print(intro + 'unable to analyze opponent opt {}; maximum recursion depth exceeded;'
# ' xp set to current xp ({})'.format(q, xpn))
#
# else:
# if DEBUG_OUTPUT:
# print(intro + 'opponent opt {} leads to defeat/stalemate, xp counted as -inf'.format(q))
# xpn = -INF
#
# xps.append(xpn)
#
# xp = min(xps)
# if DEBUG_OUTPUT:
# print(intro + 'analysis of opt {} finished, final possible xps {}'.format(i, xps))
# print(intro + 'min xp {}'.format(xp))
#
# # elif type(oma) != bool and len(oma) == 2:
# # bpcs = []
# # for p in pcs:
# # bpcs.append(copy.deepcopy(p))
# #
# # if DEBUG_OUTPUT:
# # print(
# # intro + 'semi-analyzing opt {} ({} -> {})...'.format(i,
# # num_to_chess_coord(
# # mvs[
# # i][0].x,
# # mvs[
# # i][0].y),
# # num_to_chess_coord(
# # mvs[
# # i][
# # 1][
# # 0],
# # mvs[
# # i][
# # 1][
# # 1])))
# #
# # move_piece(mvs[i][0], mvs[i][1][0], mvs[i][1][1], bpcs)
# # t, xp = comet(oma, bpcs, -1, -1)
# # move_piece(oma[t][0], oma[t][1][0], oma[t][1][1], bpcs)
# # xp = xp_sum(mvs[0][0].color, bpcs)
# # if DEBUG_OUTPUT:
# # print(intro + 'semi-analysis of opt {} finished; xp {}'.format(i, xp))
#
# elif DEBUG_OUTPUT:
# print(intro + 'opt {} ({} -> {}) - not fc, xp {}'.format(i,
# num_to_chess_coord(mvs[i][0].x, mvs[i][0].y),
# num_to_chess_coord(mvs[i][1][0], mvs[i][1][1]),
# xp))
# options.append(xp)
# else:
# m = max(options)
# turns = [i for i in range(len(options)) if options[i] == m]
# turn = random.choice(turns)
# if DEBUG_OUTPUT:
# print(intro + 'final opts {}'.format(str(options).replace('100000000000000000000', 'inf')))
#
# if DEBUG_OUTPUT:
# print(intro + 'selected opt {}'.format(turn))
#
# return turn, max(options)
# def get_piece_by_coords(x, y, pieces):
# for piece in pieces:
# if piece.x == x and piece.y == y:
# return piece
# return None
#
# def get_index_by_coords(x, y, pieces):
# for i in range(len(pieces)):
# if pieces[i].x == x and pieces[i].y == y:
# return i
# return None
# def get_moves_by_offset(diags, x, y, board):
# stopped = [False] * 4
# ret = []
#
# for i in range(1, max(BOARD_X, BOARD_Y)):
# for d in range(4):
# if not stopped[d]:
# p = board[x + diags[d][0] * i, y + diags[d][1] * i]
# if p is not None:
# stopped[d] = True
# if p.color != self.color:
# ret.append((p.x, p.y))
# else:
# ret.append((self.x + diags[d][0] * i, self.y + diags[d][1] * i))
# return ret
# def is_check(color, pieces):
# return get_all_moves(not color, pieces, has_king_cpt=True)
#
# def is_under_attack_of(piece1, piece2, pieces):
# allm = get_all_moves(True, pieces) + get_all_moves(False, pieces)
# allm = [x for x in allm if x[0] == piece1 and x[1][0] == piece2.x and x[1][1] == piece2.y]
# return not len(allm) == 0
#
# def can_move_piece(piece, x2, y2, pieces, aimode=False):
# pieces_back = []
#
# for p in pieces:
# pieces_back.append(copy.deepcopy(p))
#
# p1 = get_index_by_coords(piece.x, piece.y, pieces_back)
# p2 = get_index_by_coords(x2, y2, pieces_back)
#
# xp = 0
#
# if p1 is None:
# raise Exception('No such piece')
# if p2 is not None:
# xp += pieces_back[p2].type.capture_price
# pieces_back.pop(p2)
# if p1 > p2:
# p1 -= 1
#
# pieces_back[p1].x = x2
# pieces_back[p1].y = y2
#
# ret = not is_check(pieces_back[p1].color, pieces_back)
#
# xp += CHECK_XP if is_check(not piece.color, pieces_back) else 0
# xp = TURN_XP if xp == 0 else xp
# pieces_back[p1].add_xp(xp)
#
# total_xp = xp_diff(piece.color, pieces_back)
#
# if aimode:
# return ret, total_xp, get_all_moves(not piece.color, pieces_back) # total_xp = difference between sum of xp of pcs
# # of this color and pieces of the opp color
# else:
# return ret
#
# def move_piece(piece, x2, y2, pieces):
# global pawn_prom
# p1 = get_index_by_coords(piece.x, piece.y, pieces)
# p2 = get_index_by_coords(x2, y2, pieces)
#
# xpsum = 0
#
# if p1 is None:
# raise Exception('No such piece')
# if p1 == p2:
# raise Exception('Can\'t move piece to previous location')
# if p2 is not None:
# xpsum += pieces[p2].type.capture_price
# pieces.pop(p2)
# if p1 > p2:
# p1 -= 1
#
# if pieces[p1].type == PAWN and pawn_prom:
# print(LANGUAGE.phrases.PROMOTION_CHOICE)
# typ = input()
# if typ == '1':
# pieces[p1] = Piece(p1.color, ROOK, x2, y2, moved=True)
# elif typ == '2':
# pieces[p1] = Piece(p1.color, BISHOP, x2, y2, moved=True)
# elif typ == '3':
# pieces[p1] = Piece(p1.color, KNIGHT, x2, y2, moved=True)
# else:
# pieces[p1] = Piece(p1.color, QUEEN, x2, y2, moved=True)
# else:
# pieces[p1].x = x2
# pieces[p1].y = y2
#
# xpsum += CHECK_XP if is_check(not piece.color, pieces) else 0
# xpsum = TURN_XP if xpsum == 0 else xpsum
# pieces[p1].add_xp(xpsum)
#
# def get_all_moves(color, pieces, has_king_cpt=False, has_mob_cpt=False):
# ret = []
# captures = []
# res = []
# for p in pieces:
# if p.color == color:
# rt, cpt = p.get_moves(pieces)
# ret.extend([(p, x) for x in rt])
# captures.extend([(p, x) for x in cpt])
# for x, y in cpt:
# if get_piece_by_coords(x, y, pieces).type == KING:
# return True
# if get_piece_by_coords(x, y, pieces).type == MOB and has_mob_cpt:
# return True
#
# if has_king_cpt or has_mob_cpt:
# return False
#
# # --- Check all capture variants for checks
# popped = []
#
# for i in range(len(captures)):
# b = can_move_piece(captures[i][0], captures[i][1][0], captures[i][1][1], pieces)
# if not b:
# popped.append(captures[i])
#
# for p in popped:
# captures.remove(p)
#
# if len(captures) == 0:
# # --- Same with ret
# popped = []
#
# for i in range(len(ret)):
# b = can_move_piece(ret[i][0], ret[i][1][0], ret[i][1][1], pieces)
# if not b:
# popped.append(ret[i])
#
# for p in popped:
# ret.remove(p)
#
# res = ret
# else:
# res = captures
#
# if len(res) == 0:
# return is_check(color, pieces)
# else:
# return res
# def change_back(v):
# global curon
#
# v = False
# if v:
# curon = not curon
#
# if curon:
# print('\033[47m', end='')
# else:
# print('\033[0m', end='')
# def power(type):
# if type == PAWN:
# return 1
# if type == KNIGHT:
# return 2
# if type == BISHOP:
# return 3
# if type == ROOK:
# return 4
# if type == QUEEN:
# return 5
# if type == KING:
# return 6
# if type == AMAZON:
# return 7
#
# def select_move_toward(p1, p2, mvs):
# xd, yd = abs(p1.x - p2.x), abs(p1.y - p2.y)
# resindex = -1
# resval = -INF
# for m in range(len(mvs)):
# nx, ny = abs(mvs[m][1][0] - p2.x), abs(mvs[m][1][1] - p2.y)
# change = xd - nx + yd - ny
# if change > resval:
# resval = change
# resindex = m
#
# return resindex
#
# def select_move_away(p1, p2, mvs):
# xd, yd = abs(p1.x - p2.x), abs(p1.y - p2.y)
# resindex = -1
# resval = -INF
# for m in range(len(mvs)):
# nx, ny = abs(mvs[m][1][0] - p2.x), abs(mvs[m][1][1] - p2.y)
# change = nx - xd + ny - yd
# if change > resval:
# resval = change
# resindex = m
#
# return resindex
#
# def escape_capture_of(mob, piece1, pieces):
# variants = [(-1, 0), (1, 0), (0, -1), (0, 1)]
# ret = []
# for v in variants:
# pieces_back = []
# for p in pieces:
# pieces_back.append(copy.deepcopy(p))
# move_piece(mob, mob.x + v[0], mob.y + v[1], pieces_back)
# if not is_under_attack_of(piece1, mob, pieces_back):
# ret.append(v)
# return ret
#
# def get_all_mobs(pieces):
# return [x for x in pieces if x.type == MOB]
#
# def can_move_piece(self, p1, x, y, return_details=False):
# board_c = copy.copy(self)
#
# p2 = self[x, y]
# board_c.pieces.pop((self[p1].x, self[p1].y))
#
# xp = 0
#
# if p1 is None:
# raise Exception('No such piece')
# if p2 is not None:
# xp += board_c.pieces_l[p2].type.capture_price
# self.remove(p2)
# if p1 > p2:
# p1 -= 1
#
# board_c[p1].x = x
# board_c[p1].y = y
#
# ret = not self.is_check(board_c.pieces_l[p1].color)
#
# xp += CHECK_XP if self.is_check(not board_c.pieces_l[p1].color) else 0
# xp = TURN_XP if xp == 0 else xp
# board_c.pieces_l[p1].add_xp(xp)
#
# total_xp = board_c.xp_diff(board_c.pieces_l[p1].color)
#
# if return_details:
# mv, cpt = self.get_all_moves()
# return ret, total_xp, mv[int(not board_c.pieces_l[p1].color)], cpt[int(not board_c.pieces_l[p1].color)]
# # total_xp = difference between sum of xp of pcs
# # of this color and pieces of the opp color
# else:
# return ret
#
# def move_to_chess(piece, x, y, pieces):
# if piece.type == PAWN:
# if piece.x == x:
# ret = num_to_chess_coord(x, y)
# else:
# ret = LINES[piece.x] + LINES[x]
# else:
# if get_piece_by_coords(x, y, pieces) is None:
# ret = piece.type.abbr + num_to_chess_coord(x, y)
# else:
# ret = piece.type.abbr + ':' + num_to_chess_coord(x, y)
# return ret
#
# bool1 = len(t) != 2 or len(t[0]) != 2 or len(t[1]) != 2 or \
# t[0][0] not in LINES[:BOARD_X] or t[1][0] not in LINES[:BOARD_X] or \
# t[0][1] not in string.digits or t[1][1] not in string.digits | gpl-3.0 | 4,026,633,267,642,389,500 | 42.372917 | 124 | 0.357562 | false | 3.246989 | false | false | false |
puiterwijk/bugwarrior | tests/test_jira.py | 3 | 1728 | import mock
from bugwarrior.services.jira import JiraService
from .base import ServiceTest
class TestJiraIssue(ServiceTest):
SERVICE_CONFIG = {
'jira.username': 'one',
'jira.base_uri': 'two',
'jira.password': 'three',
}
def setUp(self):
with mock.patch('jira.client.JIRA._get_json'):
self.service = self.get_mock_service(JiraService)
def test_to_taskwarrior(self):
arbitrary_project = 'DONUT'
arbitrary_id = '10'
arbitrary_url = 'http://one'
arbitrary_summary = 'lkjaldsfjaldf'
arbitrary_record = {
'fields': {
'priority': 'Blocker',
'summary': arbitrary_summary,
},
'key': '%s-%s' % (arbitrary_project, arbitrary_id, ),
}
arbitrary_extra = {
'jira_version': 5,
'annotations': ['an annotation'],
}
issue = self.service.get_issue_for_record(
arbitrary_record, arbitrary_extra
)
expected_output = {
'project': arbitrary_project,
'priority': (
issue.PRIORITY_MAP[arbitrary_record['fields']['priority']]
),
'annotations': arbitrary_extra['annotations'],
'tags': [],
issue.URL: arbitrary_url,
issue.FOREIGN_ID: arbitrary_record['key'],
issue.SUMMARY: arbitrary_summary,
issue.DESCRIPTION: None,
}
def get_url(*args):
return arbitrary_url
with mock.patch.object(issue, 'get_url', side_effect=get_url):
actual_output = issue.to_taskwarrior()
self.assertEqual(actual_output, expected_output)
| gpl-3.0 | 4,223,890,027,943,420,400 | 27.8 | 74 | 0.542824 | false | 4.075472 | false | false | false |
GeorgeArgyros/symautomata | symautomata/pdastring.py | 1 | 26889 | """
This module retrieves a simple string from a PDA
using the state removal method
"""
from pda import PDAState
class PdaString():
"""Retrieves a string from a PDA"""
def __init__(self):
"""Class Initialization"""
self.statediag = []
self.quickresponse = {}
self.quickresponse_types = {}
pass
def _combine_rest_push(self):
"""Combining Rest and Push States"""
new = []
change = 0
# DEBUG
# logging.debug('Combining Rest and Push')
i = 0
examinetypes = self.quickresponse_types[3]
for state in examinetypes:
if state.type == 3:
for nextstate_id in state.trans.keys():
found = 0
# if nextstate_id != state.id:
if nextstate_id in self.quickresponse:
examines = self.quickresponse[nextstate_id]
for examine in examines:
if examine.id == nextstate_id and examine.type == 1:
temp = PDAState()
temp.type = 1
temp.sym = examine.sym
temp.id = state.id
for nextnextstate_id in examine.trans:
# if nextnextstate_id != examine.id :
for x_char in state.trans[nextstate_id]:
for z_char in examine.trans[
nextnextstate_id]:
if nextnextstate_id not in temp.trans:
temp.trans[
nextnextstate_id] = []
if x_char != 0 and z_char != 0:
temp.trans[
nextnextstate_id].append(x_char + z_char)
# DEBUGprint 'transition is now
# '+x_char +' + '+ z_char
elif x_char != 0 and z_char == 0:
temp.trans[
nextnextstate_id].append(x_char)
# DEBUGprint 'transition is now
# '+x_char
elif x_char == 0 and z_char != 0:
temp.trans[
nextnextstate_id].append(z_char)
# DEBUGprint 'transition is now
# '+z_char
elif x_char == 0 and z_char == 0:
temp.trans[
nextnextstate_id].append(0)
# DEBUGprint 'transition is now
# empty'
else:
pass
found = 1
new.append(temp)
if found == 1:
# print 'Lets combine one with id '+`state.id`+'(rest)
# and one with id '+`nextstate_id`+'(push)'
change = 1
# del(state.trans[nextstate_id])
i = i + 1
if change == 0:
return []
else:
return new
def _combine_push_rest(self):
"""Combining Push and Rest"""
new = []
change = 0
# DEBUG
# logging.debug('Combining Push and Rest')
i = 0
examinetypes = self.quickresponse_types[1]
for state in examinetypes:
if state.type == 1:
for nextstate_id in state.trans.keys():
found = 0
# if nextstate_id != state.id:
if nextstate_id in self.quickresponse:
examines = self.quickresponse[nextstate_id]
for examine in examines:
if examine.id == nextstate_id and examine.type == 3:
temp = PDAState()
temp.type = 1
temp.sym = state.sym
temp.id = state.id
for nextnextstate_id in examine.trans:
# if nextnextstate_id != examine.id :
for x_char in state.trans[nextstate_id]:
for z_char in examine.trans[
nextnextstate_id]:
if nextnextstate_id not in temp.trans:
temp.trans[
nextnextstate_id] = []
if x_char != 0 and z_char != 0:
temp.trans[
nextnextstate_id].append(x_char + z_char)
# DEBUGprint 'transition is now
# '+x_char +' + '+ z_char
elif x_char != 0 and z_char == 0:
temp.trans[
nextnextstate_id].append(x_char)
# DEBUGprint 'transition is now
# '+x_char
elif x_char == 0 and z_char != 0:
temp.trans[
nextnextstate_id].append(z_char)
# DEBUGprint 'transition is now
# '+z_char
elif x_char == 0 and z_char == 0:
temp.trans[
nextnextstate_id].append(0)
# DEBUGprint 'transition is now
# empty'
else:
pass
found = 1
new.append(temp)
if found == 1:
# DEBUGprint 'Lets combine one with id
# '+`state.id`+'(push) and one with id
# '+`nextstate_id`+'(rest)'
change = 1
del state.trans[nextstate_id]
i = i + 1
if change == 0:
return []
else:
return new
def _combine_pop_rest(self):
"""Combining Pop and Rest"""
new = []
change = 0
# DEBUG
# logging.debug('Combining Pop and Rest')
i = 0
examinetypes = self.quickresponse_types[2]
for state in examinetypes:
if state.type == 2:
for nextstate_id in state.trans.keys():
found = 0
# if nextstate_id != state.id:
if nextstate_id in self.quickresponse:
examines = self.quickresponse[nextstate_id]
for examine in examines:
if examine.id == nextstate_id and examine.type == 3:
if state.sym != 0:
temp = PDAState()
temp.type = 2
temp.sym = state.sym
temp.id = state.id
for nextnextstate_id in examine.trans:
# if nextnextstate_id != examine.id:
for x_char in state.trans[nextstate_id]:
for z_char in examine.trans[
nextnextstate_id]:
if nextnextstate_id not in temp.trans:
temp.trans[
nextnextstate_id] = []
if x_char != 0 and z_char != 0:
temp.trans[
nextnextstate_id].append(x_char + z_char)
# DEBUGprint 'transition is
# now '+x_char +' + '+ z_char
elif x_char != 0 and z_char == 0:
temp.trans[
nextnextstate_id].append(x_char)
# DEBUGprint 'transition is
# now '+x_char
elif x_char == 0 and z_char != 0:
temp.trans[
nextnextstate_id].append(z_char)
# DEBUGprint 'transition is
# now '+z_char
elif x_char == 0 and z_char == 0:
temp.trans[
nextnextstate_id].append(0)
# DEBUGprint 'transition is
# now empty'
else:
pass
found = 1
new.append(temp)
else:
for nextnextstate_id in examine.trans:
# if nextnextstate_id != examine.id:
for x_char in state.trans[nextstate_id]:
temp = PDAState()
temp.type = 2
temp.id = state.id
temp.sym = x_char
temp.trans[nextnextstate_id] = []
for z_char in examine.trans[
nextnextstate_id]:
if z_char != 0:
temp.trans[
nextnextstate_id].append(z_char)
# DEBUGprint 'transition is
# now '+z_char
elif z_char == 0:
temp.trans[
nextnextstate_id].append(0)
# DEBUGprint 'transition is
# now empty'
else:
pass
found = 1
new.append(temp)
if found == 1:
# DEBUGprint 'Lets combine one with id
# '+`state.id`+'(push) and one with id
# '+`nextstate_id`+'(rest)'
change = 1
del state.trans[nextstate_id]
i = i + 1
if change == 0:
return []
else:
return new
def _combine_rest_rest(self):
"""Combining Rest and Rest"""
new = []
change = 0
# DEBUG
# logging.debug('Combining Rest and Rest')
i = 0
examinetypes = self.quickresponse_types[3]
for state in examinetypes:
if state.type == 3:
found = 0
for nextstate_id in state.trans.keys():
secondfound = 0
# if nextstate_id != state.id:
if nextstate_id in self.quickresponse:
examines = self.quickresponse[nextstate_id]
for examine in examines:
if examine.id == nextstate_id and examine.type == 3:
temp = PDAState()
temp.type = 3
temp.sym = state.sym
temp.id = state.id
for nextnextstate_id in examine.trans:
if nextnextstate_id != examine.id:
for x_char in state.trans[nextstate_id]:
for z_char in examine.trans[
nextnextstate_id]:
if nextnextstate_id not in temp.trans:
temp.trans[
nextnextstate_id] = []
if x_char != 0 and z_char != 0:
temp.trans[
nextnextstate_id].append(x_char + z_char)
# DEBUGprint 'transition is
# now '+x_char +' + '+ z_char
elif x_char != 0 and z_char == 0:
temp.trans[
nextnextstate_id].append(x_char)
# DEBUGprint 'transition is
# now '+x_char
elif x_char == 0 and z_char != 0:
temp.trans[
nextnextstate_id].append(z_char)
# DEBUGprint 'transition is
# now '+z_char
elif x_char == 0 and z_char == 0:
temp.trans[
nextnextstate_id].append(0)
# DEBUGprint 'transition is
# now empty'
else:
pass
secondfound = 1
if secondfound == 1:
new.append(temp)
found = 1
if found == 1:
# DEBUGprint 'Lets combine one with id
# '+`state.id`+'(rest) and one with id
# '+`nextstate_id`+'(rest)'
change = 1
del state.trans[nextstate_id]
i = i + 1
if change == 0:
return []
else:
return new
def _combine_push_pop(self):
"""Combining Push and Pop"""
new = []
change = 0
# DEBUG
# logging.debug('Combining Push and Pop')
i = 0
examinetypes = self.quickresponse_types[1]
for state in examinetypes:
if state.type == 1:
found = 0
for nextstate_id in state.trans.keys():
# if nextstate_id != state.id:
if nextstate_id in self.quickresponse:
examines = self.quickresponse[nextstate_id]
for examine in examines:
secondfound = 0
if examine.id == nextstate_id and examine.type == 2:
temp = PDAState()
temp.type = 3
temp.sym = 0
temp.id = state.id
if examine.sym == 0:
for nextnextstate_id in examine.trans:
# if nextnextstate_id != examine.id :
for z_char in examine.trans[
nextnextstate_id]:
if state.sym == z_char:
for x_char in state.trans[
nextstate_id]:
# DEBUGprint state.sym+' vs
# '+z_char
if nextnextstate_id not in temp.trans:
temp.trans[
nextnextstate_id] = []
if x_char != 0:
temp.trans[
nextnextstate_id].append(x_char)
# DEBUGprint
# 'transition is now
# '+x_char
else:
temp.trans[
nextnextstate_id].append(0)
# DEBUGprint
# 'transition is now
# empty'
secondfound = 1
elif state.sym == examine.sym:
for nextnextstate_id in examine.trans:
# if nextnextstate_id != examine.id :
for x_char in state.trans[nextstate_id]:
for z_char in examine.trans[
nextnextstate_id]:
if nextnextstate_id not in temp.trans:
temp.trans[
nextnextstate_id] = []
if x_char != 0 and z_char != 0:
temp.trans[
nextnextstate_id].append(x_char + z_char)
# DEBUGprint 'transition is
# now '+x_char +' + '+ z_char
elif x_char != 0 and z_char == 0:
temp.trans[
nextnextstate_id].append(x_char)
# DEBUGprint 'transition is
# now '+x_char
elif x_char == 0 and z_char != 0:
temp.trans[
nextnextstate_id].append(z_char)
# DEBUGprint 'transition is
# now '+z_char
elif x_char == 0 and z_char == 0:
temp.trans[
nextnextstate_id].append(0)
# DEBUGprint 'transition is
# now empty'
else:
pass
secondfound = 1
if secondfound == 1:
new.append(temp)
found = 1
if found == 1:
# DEBUGprint 'Lets combine one with id
# '+`state.id`+'(push) and one with id
# '+`nextstate_id`+'(pop)'
change = 1
# DEBUGprint 'delete '+`nextstate_id`+' from
# '+`state.id`
del state.trans[nextstate_id]
i = i + 1
if change == 0:
return []
else:
return new
def _check(self, accepted):
"""_check for string existence"""
# logging.debug('A check is now happening...')
# for key in self.statediag[1].trans:
# logging.debug('transition to '+`key`+" with "+self.statediag[1].trans[key][0])
total = []
if 1 in self.quickresponse:
total = total + self.quickresponse[1]
if (1, 0) in self.quickresponse:
total = total + self.quickresponse[(1, 0)]
for key in total:
if (key.id == 1 or key.id == (1, 0)) and key.type == 3:
if accepted is None:
if 2 in key.trans:
# print 'Found'
return key.trans[2]
else:
for state in accepted:
if (2, state) in key.trans:
# print 'Found'
return key.trans[(2, state)]
return -1
def _stage(self, accepted, count=0):
"""This is a repeated state in the state removal algorithm"""
new5 = self._combine_rest_push()
new1 = self._combine_push_pop()
new2 = self._combine_push_rest()
new3 = self._combine_pop_rest()
new4 = self._combine_rest_rest()
new = new1 + new2 + new3 + new4 + new5
del new1
del new2
del new3
del new4
del new5
if len(new) == 0:
# self.printer()
# print 'PDA is empty'
# logging.debug('PDA is empty')
return None
self.statediag = self.statediag + new
del new
# print 'cleaning...'
# It is cheaper to create a new array than to use the old one and
# delete a key
newstates = []
for key in self.statediag:
if len(key.trans) == 0 or key.trans == {}:
# rint 'delete '+`key.id`
# self.statediag.remove(key)
pass
else:
newstates.append(key)
del self.statediag
self.statediag = newstates
self.quickresponse = {}
self.quickresponse_types = {}
self.quickresponse_types[0] = []
self.quickresponse_types[1] = []
self.quickresponse_types[2] = []
self.quickresponse_types[3] = []
self.quickresponse_types[4] = []
for state in self.statediag:
if state.id not in self.quickresponse:
self.quickresponse[state.id] = [state]
else:
self.quickresponse[state.id].append(state)
self.quickresponse_types[state.type].append(state)
# else:
# print `key.id`+' (type: '+`key.type`+' and sym:'+`key.sym`+')'
# print key.trans
# print 'checking...'
exists = self._check(accepted)
if exists == -1:
# DEBUGself.printer()
# raw_input('next step?')
return self._stage(accepted, count + 1)
else:
# DEBUGself.printer()
# print 'Found '
print exists
# return self._stage(accepted, count+1)
return exists
def printer(self):
"""Visualizes the current state"""
for key in self.statediag:
if key.trans is not None and len(key.trans) > 0:
print '****** ' + repr(key.id) + '(' + repr(key.type)\
+ ' on sym ' + repr(key.sym) + ') ******'
print key.trans
def init(self, states, accepted):
"""Initialization of the indexing dictionaries"""
self.statediag = []
for key in states:
self.statediag.append(states[key])
self.quickresponse = {}
self.quickresponse_types = {}
self.quickresponse_types[0] = []
self.quickresponse_types[1] = []
self.quickresponse_types[2] = []
self.quickresponse_types[3] = []
self.quickresponse_types[4] = []
for state in self.statediag:
if state.id not in self.quickresponse:
self.quickresponse[state.id] = [state]
else:
self.quickresponse[state.id].append(state)
self.quickresponse_types[state.type].append(state)
# self.printer()
# raw_input('next stepA?')
return self._stage(accepted, 0)
| mit | 5,705,872,241,703,846,000 | 49.638418 | 97 | 0.322846 | false | 6.098662 | false | false | false |
msfrank/Higgins | higgins/core/manager.py | 1 | 5601 | # Higgins - A multi-media server
# Copyright (c) 2007-2009 Michael Frank <msfrank@syntaxjockey.com>
#
# This program is free software; for license information see
# the COPYING file.
import os, tempfile
from higgins.http import resource, http_headers
from higgins.http.http import Response as HttpResponse
from higgins.core.models import File, Artist, Album, Song, Genre
from higgins.core.postable_resource import PostableResource
from higgins.core.logger import CoreLogger
class UniqueFile:
def __init__(self, filename, mimetype='application/octet-stream'):
self.mimetype = mimetype
self._fd,self.path = tempfile.mkstemp(prefix=filename + '.', dir='.')
def write(self, data):
os.write(self._fd, data)
def close(self):
os.close(self._fd)
del(self._fd)
class CreateCommand(PostableResource, CoreLogger):
def acceptFile(self, headers):
content_disposition = headers.getHeader('content-disposition')
if 'filename' in content_disposition.params:
filename = content_disposition.params['filename']
else:
filename = "file"
content_type = headers.getHeader('content-type')
if isinstance(content_type, http_headers.MimeType):
mimetype = content_type.mediaType + '/' + content_type.mediaSubtype
else:
mimetype = 'application/octet-stream'
file = UniqueFile(filename, mimetype)
self.log_debug("acceptFile: created new unique file %s" % file.path);
return file
def render(self, request):
try:
# title is required
title = request.post.get('title', None)
if title == None:
return HttpResponse(400, stream="Missing required form item 'title")
is_local = request.args.get('is_local', None)
# process in local mode
if not is_local == None:
local_path = request.post.get('local_path', None)
if local_path == None:
return HttpResponse(400, stream="Missing required form item 'local_path'")
mimetype = request.post.get('mimetype', None)
if mimetype == None:
return HttpResponse(400, stream="Missing required form item 'mimetype'")
# verify that file exists at local_path
try:
s = os.stat(local_path)
except:
return HttpResponse(400, stream="Failed to stat() local file %s" % local_path)
file = File(path=local_path, mimetype=mimetype, size=s.st_size)
file.save()
else:
nfiles = len(request.files)
if nfiles == 0:
return HttpResponse(400, stream="Not local mode and no file specified")
if nfiles > 1:
return HttpResponse(400, stream="More than one file specified")
posted = request.files[0]
try:
s = os.stat(posted.path)
except:
return HttpResponse(400, stream="Failed to stat() local file %s" % local_path)
file = File(path=posted.path, mimetype=posted.mimetype, size=s.st_size)
file.save()
self.log_debug("CreateCommand: created new file %s" % posted.path)
# create or get the artist object
value = request.post.get('artist', None)
if value:
artist,created = Artist.objects.get_or_create(name=value)
else:
artist,created = Artist.objects.get_or_create(name="")
artist.save()
# create or get the genre object
value = request.post.get('genre', None)
if value:
genre,created = Genre.objects.get_or_create(name=value)
else:
genre,created = Genre.objects.get_or_create(name="")
genre.save()
# create or get the album object
value = request.post.get('album', None)
if value:
album,created = Album.objects.get_or_create(name=value, artist=artist, genre=genre)
else:
album,created = Album.objects.get_or_create(name="", artist=artist, genre=genre)
album.save()
# create the song object
song = Song(name=title, album=album, artist=artist, file=file)
value = request.post.get('track', None)
if value:
song.track_number = int(value)
value = request.post.get('length', None)
if value:
song.duration = int(value)
song.save()
self.log_debug("successfully added new song '%s'" % title)
return HttpResponse(200, stream="success!")
except Exception, e:
self.log_debug("CreateCommand failed: %s" % e)
return HttpResponse(500, stream="Internal Server Error")
class UpdateCommand(resource.Resource, CoreLogger):
def render(self, request):
return HttpResponse(404)
class DeleteCommand(resource.Resource, CoreLogger):
def render(self, request):
return HttpResponse(404)
class ManagerResource(resource.Resource, CoreLogger):
def locateChild(self, request, segments):
if segments[0] == "create":
return CreateCommand(), []
if segments[0] == "update":
return UpdateCommand(), []
if segments[0] == "delete":
return DeleteCommand(), []
return None, []
| lgpl-2.1 | 7,229,447,080,898,060,000 | 40.488889 | 99 | 0.579182 | false | 4.3151 | false | false | false |
bitdeli/bitdeli-py | bitdeli/pipeline.py | 1 | 2654 | import traceback, threading, sys
class ExitPipeline(Exception):
pass
class Pipeline(object):
def __init__(self, pipeline):
self.lock = threading.Condition(threading.Lock())
self.thread = threading.Thread(target=self._run, args=(pipeline,))
self.item = None
self.error = None
self.active = False
self.exit = False
self.thread.start()
def _run(self, pipeline):
try:
for x in pipeline(self._iter()):
pass
except ExitPipeline:
pass
except:
self.error = traceback.format_exc()
# there are two ways how we may end up here:
if not self.active:
# case 1) initialization of the pipeline crashes
# before we get to 'yield item' in _iter below: We must
# wait for the main thread to call next(), so we can
# release it properly below.
self.lock.acquire()
while self.item == None:
self.lock.wait()
# case 2) crash occurs after 'yield item' moves control to
# the other stages in the pipeline: release lock as if we
# had processed the item as usual.
self.lock.notify()
self.lock.release()
def _iter(self):
while True:
self.lock.acquire()
self.active = True
while self.item == None:
self.lock.wait()
if self.exit:
if self.error:
raise ExitPipeline()
else:
return
item = self.item
self.item = None
yield item
self.lock.notify()
self.lock.release()
def next(self, item):
self.lock.acquire()
self.item = item
self.lock.notify()
self.lock.wait()
self.lock.release()
return self.error
def close(self, error):
self.lock.acquire()
self.item = True
self.exit = True
self.error = error
self.lock.notify()
self.lock.release()
self.thread.join()
return self.error
def run(source, pipelines):
def run(pipes):
for item in source:
for pipe in pipes:
error = pipe.next(item)
if error:
return error
return False
pipes = [Pipeline(p) for p in pipelines]
error = run(pipes)
for pipe in pipes:
error = pipe.close(error)
if error:
sys.stderr.write(error)
return False
return True
| mit | -7,850,849,799,641,166,000 | 28.820225 | 74 | 0.515448 | false | 4.560137 | false | false | false |
mafagafogigante/algorithms-and-data-structures | python/maximum-subarray/maximum-subarray.py | 1 | 3211 | # This are my implementations of two algorithms that find a maximum subarray
# (a subarray whose sum is maximum).
#
# It is also possible to find the maximum subarray by testing C(n, 2) pairs.
# This is the brute-force approach and it is O(n^2). Don't use this.
#
# The first one finds the maximum subarray using D&C in O(n lg n).
#
# The second one can find the maximum subarray in O(n).
# It is known as Kadane's algorithm.
def find_maximum_crossing_subarray(array, start, end):
"""
Finds the maximum middle-crossing subarray between start and end, inclusive.
Returns a tuple of the form (left index, right index, sum).
"""
mid = (start + end) // 2
cur_l_sum = 0
max_l_sum = None
max_l_index = mid
for i in range(mid, start - 1, -1):
cur_l_sum += array[i]
if max_l_sum is None or cur_l_sum > max_l_sum:
max_l_sum = cur_l_sum
max_l_index = i
cur_r_sum = 0
max_r_sum = 0
max_r_index = mid
# As the left sum includes the middle element, the right sum does not.
for i in range(mid + 1, end + 1):
cur_r_sum += array[i]
if cur_r_sum > max_r_sum:
max_r_sum = cur_r_sum
max_r_index = i
return (max_l_index, max_r_index, max_l_sum + max_r_sum)
def find_maximum_subarray(array, start, end):
"""
Finds the maximum subarray between start and end (inclusive).
Returns a tuple of the form (left index, right index, sum).
"""
if start == end:
return (start, end, array[start])
else:
mid = (start + end) // 2
l = find_maximum_subarray(array, start, mid)
m = find_maximum_crossing_subarray(array, start, end)
r = find_maximum_subarray(array, mid + 1, end)
if l[2] >= r[2] and l[2] >= m[2]:
return l
elif r[2] >= l[2] and r[2] >= m[2]:
return r
else:
return m
def find_maximum_subarray_by_kadane(array, start, end):
max_ending_here = array[start]
max_ending_here_l = start
max_so_far = array[start]
max_so_far_l = start
max_so_far_r = start
for i in range(start + 1, end + 1):
if array[i] > max_ending_here + array[i]:
max_ending_here = array[i]
max_ending_here_l = i
else:
max_ending_here += array[i]
if max_ending_here > max_so_far:
max_so_far = max_ending_here
max_so_far_l = max_ending_here_l
max_so_far_r = i
return (max_so_far_l, max_so_far_r, max_so_far)
if __name__ == '__main__':
array_a = [1]
array_b = [-1]
array_c = [10, -20]
array_d = [-15, 20, 40, -10, 15]
print(find_maximum_subarray(array_a, 0, len(array_a) - 1))
print(find_maximum_subarray(array_b, 0, len(array_b) - 1))
print(find_maximum_subarray(array_c, 0, len(array_c) - 1))
print(find_maximum_subarray(array_d, 0, len(array_d) - 1))
print(find_maximum_subarray_by_kadane(array_a, 0, len(array_a) - 1))
print(find_maximum_subarray_by_kadane(array_b, 0, len(array_b) - 1))
print(find_maximum_subarray_by_kadane(array_c, 0, len(array_c) - 1))
print(find_maximum_subarray_by_kadane(array_d, 0, len(array_d) - 1))
| unlicense | 2,358,794,061,456,667,600 | 34.677778 | 80 | 0.584865 | false | 2.935101 | false | false | false |
SteveParrington/jukeboxify | server/jukeboxify_cli.py | 1 | 1300 | import zmq
from getpass import getpass
def jsonify(text_command):
tokens = text_command.split()
args = []
if len(tokens) > 1:
args = tokens[1:]
json = { "opcode": tokens[0], "args": args }
return json
def login_prompt():
login_payload = {
"opcode": "login",
"args": [
raw_input("Username: "),
getpass()
]
}
return login_payload
def enter_repl(socket):
print("Jukeboxify CLI - Developed by Steve Parrington")
try:
while True:
text_command = raw_input("> ")
json = jsonify(text_command)
if json['opcode'] == 'exit':
raise KeyboardInterrupt
elif json['opcode'] == 'login':
json = login_prompt()
socket.send_json(json)
response = socket.recv_json()
if "message" in response:
print(response["message"])
else:
print(response)
except KeyboardInterrupt:
print("Exiting Jukeboxify CLI...")
socket.disconnect('tcp://127.0.0.1:7890')
def main():
context = zmq.Context.instance()
socket = context.socket(zmq.REQ)
socket.connect('tcp://127.0.0.1:7890')
enter_repl(socket)
if __name__ == '__main__':
main()
| apache-2.0 | -735,574,770,677,182,100 | 25.530612 | 59 | 0.532308 | false | 3.927492 | false | false | false |
morinted/plover | plover/machine/txbolt.py | 4 | 3417 | # Copyright (c) 2011 Hesky Fisher
# See LICENSE.txt for details.
"Thread-based monitoring of a stenotype machine using the TX Bolt protocol."
import plover.machine.base
# In the TX Bolt protocol, there are four sets of keys grouped in
# order from left to right. Each byte represents all the keys that
# were pressed in that set. The first two bits indicate which set this
# byte represents. The next bits are set if the corresponding key was
# pressed for the stroke.
# 00XXXXXX 01XXXXXX 10XXXXXX 110XXXXX
# HWPKTS UE*OAR GLBPRF #ZDST
# The protocol uses variable length packets of one, two, three or four
# bytes. Only those bytes for which keys were pressed will be
# transmitted. The bytes arrive in order of the sets so it is clear
# when a new stroke starts. Also, if a key is pressed in an earlier
# set in one stroke and then a key is pressed only in a later set then
# there will be a zero byte to indicate that this is a new stroke. So,
# it is reliable to assume that a stroke ended when a lower set is
# seen. Additionally, if there is no activity then the machine will
# send a zero byte every few seconds.
STENO_KEY_CHART = ("S-", "T-", "K-", "P-", "W-", "H-", # 00
"R-", "A-", "O-", "*", "-E", "-U", # 01
"-F", "-R", "-P", "-B", "-L", "-G", # 10
"-T", "-S", "-D", "-Z", "#") # 11
class TxBolt(plover.machine.base.SerialStenotypeBase):
"""TX Bolt interface.
This class implements the three methods necessary for a standard
stenotype interface: start_capture, stop_capture, and
add_callback.
"""
KEYS_LAYOUT = '''
# # # # # # # # # #
S- T- P- H- * -F -P -L -T -D
S- K- W- R- * -R -B -G -S -Z
A- O- -E -U
'''
def __init__(self, params):
super().__init__(params)
self._reset_stroke_state()
def _reset_stroke_state(self):
self._pressed_keys = []
self._last_key_set = 0
def _finish_stroke(self):
steno_keys = self.keymap.keys_to_actions(self._pressed_keys)
if steno_keys:
self._notify(steno_keys)
self._reset_stroke_state()
def run(self):
"""Overrides base class run method. Do not call directly."""
settings = self.serial_port.getSettingsDict()
settings['timeout'] = 0.1 # seconds
self.serial_port.applySettingsDict(settings)
self._ready()
while not self.finished.isSet():
# Grab data from the serial port, or wait for timeout if none available.
raw = self.serial_port.read(max(1, self.serial_port.inWaiting()))
if not raw:
# Timeout, finish the current stroke.
self._finish_stroke()
continue
for byte in raw:
key_set = byte >> 6
if key_set <= self._last_key_set:
# Starting a new stroke, finish previous one.
self._finish_stroke()
self._last_key_set = key_set
for i in range(5 if key_set == 3 else 6):
if (byte >> i) & 1:
key = STENO_KEY_CHART[(key_set * 6) + i]
self._pressed_keys.append(key)
if key_set == 3:
# Last possible set, the stroke is finished.
self._finish_stroke()
| gpl-2.0 | -7,353,870,886,692,571,000 | 36.966667 | 84 | 0.566579 | false | 3.662379 | false | false | false |
Petr-Kovalev/nupic-win32 | examples/prediction/data/extra/hotgym/raw/makeDataset.py | 2 | 4119 | #! /usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have purchased from
# Numenta, Inc. a separate commercial license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""Unify the various HotGym CSV files to a single coherent StandardFile
See README.txt for details
"""
import os
import sys
import glob
import operator
import datetime
"""from nupic.providers.WeatherProvider import (
WeatherStation,
getClosestStation)
"""
from nupic.data.file import File
def fixEOL(f):
"""Make sure the end of line character is '\n'
This is needed in order to use fileinput.input() to process the files. The
file format of the raw gym dataset unfortunately contains \r (old Mac format)
EOL characters.
"""
text = open(f).read()
# If there are no carriage returns (\r) just return
if text.find('\r') == -1:
return
# Takes care of Windows format
text = text.replace('\r\n', '\n')
# Takes care of old Mac format
text = text.replace('\r', '\n')
open(f, 'w').write(text)
def _parseTimestamp(t):
tokens = t.split()
day, month, year = [int(x) for x in tokens[0].split('/')]
if len(tokens) == 1:
hour = 0
minute = 0
else:
assert len(tokens) == 3
hour, minute, seconds = [int(x) for x in tokens[1].split(':')]
hour %= 12
if tokens[2] == 'PM':
hour += 12
result = datetime.datetime(year, month, day, hour, minute)
assert datetime.datetime(2010, 7, 2) <= result < datetime.datetime(2011, 1, 1)
return result
def _parseLine(line):
# Get rid of the double quotes arounf each field
line = line.replace('"', '')
# Split the line and get rid of the first field (running count)
fields = line[:-1].split(',')[1:]
gym = fields[0]
record = [gym] # Gym
# Add in an address for each Gym
gymAddresses = {
'Balgowlah Platinum': 'Shop 67 197-215 Condamine Street Balgowlah 2093',
'Lane Cove': '24-28 Lane Cove Plaza Lane Cove 2066',
'Mosman': '555 Military Rd Mosman 2088',
'North Sydney - Walker St': '100 Walker St North Sydney 2060',
'Randwick': 'Royal Randwick Shopping Centre 73 Belmore Rd Randwick 2031'
}
address = gymAddresses[gym]
record.append(address)
# Parse field 2 to a datetime object
record.append(_parseTimestamp(fields[1]))
# Add the consumption
record.append(float(fields[2]))
return record
def makeDataset():
"""
"""
inputFile = 'numenta_air_Con.csv'
fixEOL(inputFile)
fields = [
('gym', 'string', 'S'),
('address', 'string', ''),
('timestamp', 'datetime', 'T'),
('consumption', 'float', '')]
gymName = None
missing = 0
total = 0
# Create a the output file by parsing the customer given csv
with File('./hotgym2.csv', fields) as o:
with open(inputFile) as f:
# Skip header
f.readline()
# iterate over all the lines in the input file
for line in f.xreadlines():
# Parse the fields in the current line
record = _parseLine(line)
# Write the merged record to the output file
o.write(record)
if record[0] != gymName:
gymName = record[0]
print gymName
return total, missing
if __name__ == '__main__':
makeDataset()
print 'Done.'
| gpl-3.0 | -9,085,653,973,188,496,000 | 25.574194 | 80 | 0.62758 | false | 3.603675 | false | false | false |
sbarton272/AcousticBarcodes-Explorations | barcodes/dxfwrite/build/lib/dxfwrite/insert2.py | 2 | 3245 | #!/usr/bin/env python
#coding:utf-8
# Purpose: insert block references with appended attributes
# Created: 11.04.2010
# Copyright (C) 2010, Manfred Moitzi
# License: MIT License
"""
Provides the Insert2 composite-entity.
Insert a new block-reference with auto-creating of attribs from attdefs,
and setting attrib-text by the attribs-dict.
"""
__author__ = "mozman <mozman@gmx.at>"
from dxfwrite.entities import Insert
import dxfwrite.const as const
__all__ = ['Insert2']
class Insert2(object):
"""
Insert a new block-reference with auto-creating of attribs from attdefs,
and setting attrib-text by the attribs-dict.
"""
def __init__(self, blockdef, insert, attribs, rotation=0,
xscale=1., yscale=1., zscale=1.,
layer=const.BYBLOCK, color=const.BYLAYER, linetype=None):
"""
Insert a new block-reference with auto-creating of :ref:`ATTRIB` from
:ref:`ATTDEF`, and setting attrib-text by the attribs-dict.
(multi-insert is not supported)
:param blockdef: the block definition itself
:param insert: insert point (xy- or xyz-tuple), z-axis is 0 by default
:param float xscale: x-scale factor, default=1.
:param float yscale: y-scale factor, default=1.
:param float zscale: z-scale factor, default=1.
:param float rotation: rotation angle in degree, default=0.
:param dict attribs: dict with tag:value pairs, to fill the the attdefs in the
block-definition. example: {'TAG1': 'TextOfTAG1'}, create and insert
an attrib from an attdef (with tag-value == 'TAG1'), and set
text-value of the attrib to value 'TextOfTAG1'.
:param string linetype: linetype name, if not defined = **BYLAYER**
:param string layer: layer name
:param int color: range [1..255], 0 = **BYBLOCK**, 256 = **BYLAYER**
"""
self.blockdef = blockdef
self.insert = insert
self.attribs = attribs
self.xscale = xscale
self.yscale = yscale
self.zscale = zscale
self.rotation = rotation
self.layer = layer
self.color = color
self.linetype = linetype
def _build(self):
def set_tags(insert_entity):
basepoint = self.blockdef['basepoint']['xyz']
for tag, text in self.attribs.items():
try:
attdef = self.blockdef.find_attdef(tag)
attrib = attdef.new_attrib(text=text)
insert_entity.add(attrib, relative=True, block_basepoint=basepoint)
except KeyError: # no attdef <tag> found
pass
insert = Insert(blockname=self.blockdef['name'], insert=self.insert,
rotation=self.rotation,
layer=self.layer, color=self.color,
linetype=self.linetype)
for key, value in [('xscale', self.xscale),
('yscale', self.yscale),
('zscale', self.zscale)]:
if value != 1.:
insert[key] = value
set_tags(insert)
return insert.__dxf__()
def __dxf__(self):
return self._build()
| mit | -7,397,871,041,693,382,000 | 37.630952 | 87 | 0.59322 | false | 3.933333 | false | false | false |
dallaspythondojo/python | Woodall_Robert/Assignments/python_fundamentals/names.py | 2 | 2297 | '''
Part 1:
Given the following list:
students = [
{'first_name': 'Michael', 'last_name' : 'Jordan'},
{'first_name' : 'John', 'last_name' : 'Rosales'},
{'first_name' : 'Mark', 'last_name' : 'Guillen'},
{'first_name' : 'KB', 'last_name' : 'Tonel'}
]
Create a program that outputs:
Michael Jordan
John Rosales
Mark Guillen
KB Tonel
Part 2:
Given the following dictionary:
users = {
'Students': [
{'first_name': 'Michael', 'last_name' : 'Jordan'},
{'first_name' : 'John', 'last_name' : 'Rosales'},
{'first_name' : 'Mark', 'last_name' : 'Guillen'},
{'first_name' : 'KB', 'last_name' : 'Tonel'}
],
'Instructors': [
{'first_name' : 'Michael', 'last_name' : 'Choi'},
{'first_name' : 'Martin', 'last_name' : 'Puryear'}
]
}
Create a program that prints the following format (including number of characters in each combined name):
Students
1 - MICHAEL JORDAN - 13
2 - JOHN ROSALES - 11
3 - MARK GUILLEN - 11
4 - KB TONEL - 7
Instructors
1 - MICHAEL CHOI - 11
2 - MARTIN PURYEAR - 13
'''
students = [
{'first_name': 'Michael', 'last_name' : 'Jordan'},
{'first_name' : 'John', 'last_name' : 'Rosales'},
{'first_name' : 'Mark', 'last_name' : 'Guillen'},
{'first_name' : 'KB', 'last_name' : 'Tonel'}
]
# print just names for part 1
for dict in students:
print('{} {}'.format(dict['first_name'], dict['last_name']))
print # blank line
users = {
'Students': [
{'first_name': 'Michael', 'last_name' : 'Jordan'},
{'first_name' : 'John', 'last_name' : 'Rosales'},
{'first_name' : 'Mark', 'last_name' : 'Guillen'},
{'first_name' : 'KB', 'last_name' : 'Tonel'}
],
'Instructors': [
{'first_name' : 'Michael', 'last_name' : 'Choi'},
{'first_name' : 'Martin', 'last_name' : 'Puryear'}
]
}
# print names with associated index and number of chars
for user_item, user_list in users.iteritems():
dict_count = 0
print(user_item)
for dict in user_list:
dict_count += 1
print('{} - {} {} - {}'.format(str(dict_count),
dict['first_name'].upper(),
dict['last_name'].upper(),
str(len(dict['first_name']) + len(dict['last_name']))))
| mit | 360,202,921,939,173,440 | 25.709302 | 106 | 0.542011 | false | 2.922392 | false | false | false |
meego-tablet-ux/meego-app-browser | third_party/mesa/MesaLib/src/mapi/glapi/gen/gl_XML.py | 33 | 24745 | #!/usr/bin/env python
# (C) Copyright IBM Corporation 2004, 2005
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# on the rights to use, copy, modify, merge, publish, distribute, sub
# license, and/or sell copies of the Software, and to permit persons to whom
# the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice (including the next
# paragraph) shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
# IBM AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# Authors:
# Ian Romanick <idr@us.ibm.com>
import libxml2
import re, sys, string
import typeexpr
def parse_GL_API( file_name, factory = None ):
	"""Parse the named API description XML file into an API object.

	file_name -- path of the XML API description.  The file is read
	    with XInclude processing, DTD validation, attribute defaulting,
	    DTD loading, and entity substitution enabled.
	factory -- optional gl_item_factory (or subclass) instance used to
	    create every object in the resulting tree; a plain
	    gl_item_factory is created when none is supplied.

	Returns the fully populated "api" item produced by the factory.
	"""
	doc = libxml2.readFile( file_name, None, libxml2.XML_PARSE_XINCLUDE + libxml2.XML_PARSE_NOBLANKS + libxml2.XML_PARSE_DTDVALID + libxml2.XML_PARSE_DTDATTR + libxml2.XML_PARSE_DTDLOAD + libxml2.XML_PARSE_NOENT )
	# NOTE(review): the return value of xincludeProcess is never
	# examined, so XInclude failures are silently ignored here --
	# confirm whether that is intentional.
	ret = doc.xincludeProcess()

	if not factory:
		factory = gl_item_factory()

	api = factory.create_item( "api", None, None )
	api.process_element( doc )

	# After the XML has been processed, we need to go back and assign
	# dispatch offsets to the functions that request that their offsets
	# be assigned by the scripts.  Typically this means all functions
	# that are not part of the ABI.

	for func in api.functionIterateByCategory():
		if func.assign_offset:
			func.offset = api.next_offset;
			api.next_offset += 1

	doc.freeDoc()

	return api
def is_attr_true( element, name ):
	"""Read the boolean attribute *name* from *element*.

	The attribute text must be exactly 'true' or 'false'; the former
	yields non-zero (1) and the latter zero.  Any other text --
	including a missing attribute, which reads back as None -- raises
	RuntimeError.
	"""
	text = element.nsProp( name, None )
	try:
		return {"true": 1, "false": 0}[text]
	except KeyError:
		raise RuntimeError('Invalid value "%s" for boolean "%s".' % (text, name))
class gl_print_base:
	"""Base class of all API pretty-printers.

	In the model-view-controller pattern, this is the view.  Any derived
	class will want to over-ride the printBody, printRealHeader, and
	printRealFooter methods.  Some derived classes may want to over-ride
	printHeader and printFooter, or even Print (though this is unlikely).
	"""

	def __init__(self):
		# Name of the script that is generating the output file.
		# Every derived class should set this to the name of its
		# source file.
		self.name = "a"


		# License on the *generated* source file.  This may differ
		# from the license on the script that is generating the file.
		# Every derived class should set this to some reasonable
		# value.
		#
		# See license.py for an example of a reasonable value.
		self.license = "The license for this file is unspecified."


		# The header_tag is the name of the C preprocessor define
		# used to prevent multiple inclusion.  Typically only
		# generated C header files need this to be set.  Setting it
		# causes code to be generated automatically in printHeader
		# and printFooter.
		self.header_tag = None


		# List of file-private defines that must be undefined at the
		# end of the file.  This can be used in header files to define
		# names for use in the file, then undefine them at the end of
		# the header file.
		self.undef_list = []
		return


	def Print(self, api):
		# Template method: emit the common header, the derived
		# class's body for this API, then the common footer.
		self.printHeader()
		self.printBody(api)
		self.printFooter()
		return


	def printHeader(self):
		"""Print the header associated with all files and call the printRealHeader method."""

		# "Generated file" banner followed by the license text,
		# re-indented to continue the C block comment.
		print '/* DO NOT EDIT - This file generated automatically by %s script */' \
			% (self.name)
		print ''
		print '/*'
		print ' * ' + self.license.replace('\n', '\n * ')
		print ' */'
		print ''
		if self.header_tag:
			# Open the multiple-inclusion guard; printFooter
			# emits the matching #endif.
			print '#if !defined( %s )' % (self.header_tag)
			print '#  define %s' % (self.header_tag)
			print ''
		self.printRealHeader();
		return


	def printFooter(self):
		"""Print the footer associated with all files and call the printRealFooter method."""

		self.printRealFooter()

		# Undefine any file-private macros registered by the
		# print* helpers below (PURE, FASTCALL, ...).
		if self.undef_list:
			print ''
			for u in self.undef_list:
				print "#  undef %s" % (u)

		if self.header_tag:
			print ''
			print '#endif /* !defined( %s ) */' % (self.header_tag)


	def printRealHeader(self):
		"""Print the "real" header for the created file.

		In the base class, this function is empty.  All derived
		classes should over-ride this function."""
		return


	def printRealFooter(self):
		"""Print the "real" footer for the created file.

		In the base class, this function is empty.  All derived
		classes should over-ride this function."""
		return


	def printPure(self):
		"""Conditionally define `PURE' function attribute.

		Conditionally defines a preprocessor macro `PURE' that wraps
		GCC's `pure' function attribute.  The conditional code can be
		easily adapted to other compilers that support a similar
		feature.

		The name is also added to the file's undef_list.
		"""
		self.undef_list.append("PURE")
		print """#  if defined(__GNUC__) || (defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590))
#    define PURE __attribute__((pure))
#  else
#    define PURE
#  endif"""
		return


	def printFastcall(self):
		"""Conditionally define `FASTCALL' function attribute.

		Conditionally defines a preprocessor macro `FASTCALL' that
		wraps GCC's `fastcall' function attribute.  The conditional
		code can be easily adapted to other compilers that support a
		similar feature.

		The name is also added to the file's undef_list.
		"""

		self.undef_list.append("FASTCALL")
		print """#  if defined(__i386__) && defined(__GNUC__) && !defined(__CYGWIN__) && !defined(__MINGW32__)
#    define FASTCALL __attribute__((fastcall))
#  else
#    define FASTCALL
#  endif"""
		return


	def printVisibility(self, S, s):
		"""Conditionally define visibility function attribute.

		Conditionally defines a preprocessor macro name S that wraps
		GCC's visibility function attribute.  The visibility used is
		the parameter s.  The conditional code can be easily adapted
		to other compilers that support a similar feature.

		The name is also added to the file's undef_list.
		"""

		self.undef_list.append(S)
		print """#  if defined(__GNUC__) || (defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)) && defined(__ELF__)
#    define %s  __attribute__((visibility("%s")))
#  else
#    define %s
#  endif""" % (S, s, S)
		return


	def printNoinline(self):
		"""Conditionally define `NOINLINE' function attribute.

		Conditionally defines a preprocessor macro `NOINLINE' that
		wraps GCC's `noinline' function attribute.  The conditional
		code can be easily adapted to other compilers that support a
		similar feature.

		The name is also added to the file's undef_list.
		"""

		self.undef_list.append("NOINLINE")
		print """#  if defined(__GNUC__) || (defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590))
#    define NOINLINE __attribute__((noinline))
#  else
#    define NOINLINE
#  endif"""
		return
def real_function_name(element):
	"""Return the dispatch name of a <function> element.

	An aliased function dispatches through the function it aliases, so
	a non-empty "alias" attribute wins over the element's own "name".
	"""
	name  = element.nsProp( "name", None )
	alias = element.nsProp( "alias", None )
	return alias or name
def real_category_name(c):
	"""Map a category name to its canonical spelling.

	A bare core-GL version number such as "1.5" becomes
	"GL_VERSION_1_5"; extension names are already canonical and pass
	through unchanged.
	"""
	if re.match(r"[1-9][0-9]*[.][0-9]+", c):
		return "GL_VERSION_" + c.replace(".", "_")
	return c
def classify_category(name, number):
	"""Based on the category name and number, select a numerical class for it.

	Categories are divided into four classes numbered 0 through 3.  The
	classes are:

		0. Core GL versions, sorted by version number.
		1. ARB extensions, sorted by extension number.
		2. Non-ARB extensions, sorted by extension number.
		3. Un-numbered extensions, sorted by extension name.

	Returns a two-element list [class, sort_key] suitable for ordering
	categories.
	"""

	# A name that parses as a positive float (e.g. "1.5") is a core
	# GL version.  Anything unparsable falls through to the
	# extension-name tests below.  (Previously this caught a bare
	# Exception with Python-2-only "except Exception,e" syntax.)
	try:
		core_version = float(name)
	except (ValueError, TypeError):
		core_version = 0.0

	if core_version > 0.0:
		cat_type = 0
		key = name
	elif name.startswith(("GL_ARB_", "GLX_ARB_", "WGL_ARB_")):
		cat_type = 1
		key = int(number)
	else:
		if number is not None:
			cat_type = 2
			key = int(number)
		else:
			cat_type = 3
			key = name

	return [cat_type, key]
def create_parameter_string(parameters, include_names):
	"""Create a C parameter string from a list of gl_parameters.

	Padding parameters are skipped.  When include_names is true each
	entry is the parameter's full "type name" string; otherwise only
	the type is emitted.  Entries are joined with ", ", and an empty
	parameter list yields "void".
	"""

	# Collect one string per real (non-padding) parameter.  The old
	# code accumulated into a local named "list" (shadowing the
	# builtin) and joined with the Python-2-only string.join().
	parts = []
	for p in parameters:
		if p.is_padding:
			continue

		if include_names:
			parts.append( p.string() )
		else:
			parts.append( p.type_string() )

	if not parts:
		return "void"

	return ", ".join(parts)
class gl_item:
	"""Common base for named API elements parsed from the XML.

	Records the parsing context, the element's own "name" attribute,
	and the canonical name of the category (the parent XML element)
	that contains it.
	"""

	def __init__(self, element, context):
		self.context = context
		self.name = element.nsProp( "name", None )

		# The enclosing <category> element's name, normalized
		# (e.g. "1.5" -> "GL_VERSION_1_5").
		parent_name = element.parent.nsProp( "name", None )
		self.category = real_category_name( parent_name )
		return
class gl_type( gl_item ):
def __init__(self, element, context):
gl_item.__init__(self, element, context)
self.size = int( element.nsProp( "size", None ), 0 )
te = typeexpr.type_expression( None )
tn = typeexpr.type_node()
tn.size = int( element.nsProp( "size", None ), 0 )
tn.integer = not is_attr_true( element, "float" )
tn.unsigned = is_attr_true( element, "unsigned" )
tn.name = "GL" + self.name
te.set_base_type_node( tn )
self.type_expr = te
return
def get_type_expression(self):
return self.type_expr
class gl_enum( gl_item ):
def __init__(self, element, context):
gl_item.__init__(self, element, context)
self.value = int( element.nsProp( "value", None ), 0 )
temp = element.nsProp( "count", None )
if not temp or temp == "?":
self.default_count = -1
else:
try:
c = int(temp)
except Exception,e:
raise RuntimeError('Invalid count value "%s" for enum "%s" in function "%s" when an integer was expected.' % (temp, self.name, n))
self.default_count = c
return
def priority(self):
"""Calculate a 'priority' for this enum name.
When an enum is looked up by number, there may be many
possible names, but only one is the 'prefered' name. The
priority is used to select which name is the 'best'.
Highest precedence is given to core GL name. ARB extension
names have the next highest, followed by EXT extension names.
Vendor extension names are the lowest.
"""
if self.name.endswith( "_BIT" ):
bias = 1
else:
bias = 0
if self.category.startswith( "GL_VERSION_" ):
priority = 0
elif self.category.startswith( "GL_ARB_" ):
priority = 2
elif self.category.startswith( "GL_EXT_" ):
priority = 4
else:
priority = 6
return priority + bias
class gl_parameter:
def __init__(self, element, context):
self.name = element.nsProp( "name", None )
ts = element.nsProp( "type", None )
self.type_expr = typeexpr.type_expression( ts, context )
temp = element.nsProp( "variable_param", None )
if temp:
self.count_parameter_list = temp.split( ' ' )
else:
self.count_parameter_list = []
# The count tag can be either a numeric string or the name of
# a variable. If it is the name of a variable, the int(c)
# statement will throw an exception, and the except block will
# take over.
c = element.nsProp( "count", None )
try:
count = int(c)
self.count = count
self.counter = None
except Exception,e:
count = 1
self.count = 0
self.counter = c
self.count_scale = int(element.nsProp( "count_scale", None ))
elements = (count * self.count_scale)
if elements == 1:
elements = 0
#if ts == "GLdouble":
# print '/* stack size -> %s = %u (before)*/' % (self.name, self.type_expr.get_stack_size())
# print '/* # elements = %u */' % (elements)
self.type_expr.set_elements( elements )
#if ts == "GLdouble":
# print '/* stack size -> %s = %u (after) */' % (self.name, self.type_expr.get_stack_size())
self.is_client_only = is_attr_true( element, 'client_only' )
self.is_counter = is_attr_true( element, 'counter' )
self.is_output = is_attr_true( element, 'output' )
# Pixel data has special parameters.
self.width = element.nsProp('img_width', None)
self.height = element.nsProp('img_height', None)
self.depth = element.nsProp('img_depth', None)
self.extent = element.nsProp('img_extent', None)
self.img_xoff = element.nsProp('img_xoff', None)
self.img_yoff = element.nsProp('img_yoff', None)
self.img_zoff = element.nsProp('img_zoff', None)
self.img_woff = element.nsProp('img_woff', None)
self.img_format = element.nsProp('img_format', None)
self.img_type = element.nsProp('img_type', None)
self.img_target = element.nsProp('img_target', None)
self.img_pad_dimensions = is_attr_true( element, 'img_pad_dimensions' )
self.img_null_flag = is_attr_true( element, 'img_null_flag' )
self.img_send_null = is_attr_true( element, 'img_send_null' )
self.is_padding = is_attr_true( element, 'padding' )
return
def compatible(self, other):
return 1
def is_array(self):
return self.is_pointer()
def is_pointer(self):
return self.type_expr.is_pointer()
def is_image(self):
if self.width:
return 1
else:
return 0
def is_variable_length(self):
return len(self.count_parameter_list) or self.counter
def is_64_bit(self):
count = self.type_expr.get_element_count()
if count:
if (self.size() / count) == 8:
return 1
else:
if self.size() == 8:
return 1
return 0
def string(self):
return self.type_expr.original_string + " " + self.name
def type_string(self):
return self.type_expr.original_string
def get_base_type_string(self):
return self.type_expr.get_base_name()
def get_dimensions(self):
if not self.width:
return [ 0, "0", "0", "0", "0" ]
dim = 1
w = self.width
h = "1"
d = "1"
e = "1"
if self.height:
dim = 2
h = self.height
if self.depth:
dim = 3
d = self.depth
if self.extent:
dim = 4
e = self.extent
return [ dim, w, h, d, e ]
def get_stack_size(self):
return self.type_expr.get_stack_size()
def size(self):
if self.is_image():
return 0
else:
return self.type_expr.get_element_size()
def get_element_count(self):
c = self.type_expr.get_element_count()
if c == 0:
return 1
return c
def size_string(self, use_parens = 1):
s = self.size()
if self.counter or self.count_parameter_list:
list = [ "compsize" ]
if self.counter and self.count_parameter_list:
list.append( self.counter )
elif self.counter:
list = [ self.counter ]
if s > 1:
list.append( str(s) )
if len(list) > 1 and use_parens :
return "(%s)" % (string.join(list, " * "))
else:
return string.join(list, " * ")
elif self.is_image():
return "compsize"
else:
return str(s)
def format_string(self):
if self.type_expr.original_string == "GLenum":
return "0x%x"
else:
return self.type_expr.format_string()
class gl_function( gl_item ):
def __init__(self, element, context):
self.context = context
self.name = None
self.entry_points = []
self.return_type = "void"
self.parameters = []
self.offset = -1
self.initialized = 0
self.images = []
self.assign_offset = 0
self.static_entry_points = []
# Track the parameter string (for the function prototype)
# for each entry-point. This is done because some functions
# change their prototype slightly when promoted from extension
# to ARB extension to core. glTexImage3DEXT and glTexImage3D
# are good examples of this. Scripts that need to generate
# code for these differing aliases need to real prototype
# for each entry-point. Otherwise, they may generate code
# that won't compile.
self.parameter_strings = {}
self.process_element( element )
return
def process_element(self, element):
name = element.nsProp( "name", None )
alias = element.nsProp( "alias", None )
if is_attr_true(element, "static_dispatch"):
self.static_entry_points.append(name)
self.entry_points.append( name )
if alias:
true_name = alias
else:
true_name = name
# Only try to set the offset when a non-alias
# entry-point is being processes.
offset = element.nsProp( "offset", None )
if offset:
try:
o = int( offset )
self.offset = o
except Exception, e:
self.offset = -1
if offset == "assign":
self.assign_offset = 1
if not self.name:
self.name = true_name
elif self.name != true_name:
raise RuntimeError("Function true name redefined. Was %s, now %s." % (self.name, true_name))
# There are two possible cases. The first time an entry-point
# with data is seen, self.initialized will be 0. On that
# pass, we just fill in the data. The next time an
# entry-point with data is seen, self.initialized will be 1.
# On that pass we have to make that the new values match the
# valuse from the previous entry-point.
parameters = []
return_type = "void"
child = element.children
while child:
if child.type == "element":
if child.name == "return":
return_type = child.nsProp( "type", None )
elif child.name == "param":
param = self.context.factory.create_item( "parameter", child, self.context)
parameters.append( param )
child = child.next
if self.initialized:
if self.return_type != return_type:
raise RuntimeError( "Return type changed in %s. Was %s, now %s." % (name, self.return_type, return_type))
if len(parameters) != len(self.parameters):
raise RuntimeError( "Parameter count mismatch in %s. Was %d, now %d." % (name, len(self.parameters), len(parameters)))
for j in range(0, len(parameters)):
p1 = parameters[j]
p2 = self.parameters[j]
if not p1.compatible( p2 ):
raise RuntimeError( 'Parameter type mismatch in %s. "%s" was "%s", now "%s".' % (name, p2.name, p2.type_expr.original_string, p1.type_expr.original_string))
if true_name == name or not self.initialized:
self.return_type = return_type
self.parameters = parameters
for param in self.parameters:
if param.is_image():
self.images.append( param )
if element.children:
self.initialized = 1
self.parameter_strings[name] = create_parameter_string(parameters, 1)
else:
self.parameter_strings[name] = None
return
def get_images(self):
"""Return potentially empty list of input images."""
return self.images
def parameterIterator(self):
return self.parameters.__iter__();
def get_parameter_string(self, entrypoint = None):
if entrypoint:
s = self.parameter_strings[ entrypoint ]
if s:
return s
return create_parameter_string( self.parameters, 1 )
def get_called_parameter_string(self):
p_string = ""
comma = ""
for p in self.parameterIterator():
p_string = p_string + comma + p.name
comma = ", "
return p_string
def is_abi(self):
return (self.offset >= 0 and not self.assign_offset)
def is_static_entry_point(self, name):
return name in self.static_entry_points
def dispatch_name(self):
if self.name in self.static_entry_points:
return self.name
else:
return "_dispatch_stub_%u" % (self.offset)
def static_name(self, name):
if name in self.static_entry_points:
return name
else:
return "_dispatch_stub_%u" % (self.offset)
class gl_item_factory:
"""Factory to create objects derived from gl_item."""
def create_item(self, item_name, element, context):
if item_name == "function":
return gl_function(element, context)
if item_name == "type":
return gl_type(element, context)
elif item_name == "enum":
return gl_enum(element, context)
elif item_name == "parameter":
return gl_parameter(element, context)
elif item_name == "api":
return gl_api(self)
else:
return None
class gl_api:
def __init__(self, factory):
self.functions_by_name = {}
self.enums_by_name = {}
self.types_by_name = {}
self.category_dict = {}
self.categories = [{}, {}, {}, {}]
self.factory = factory
self.next_offset = 0
typeexpr.create_initial_types()
return
def process_element(self, doc):
element = doc.children
while element.type != "element" or element.name != "OpenGLAPI":
element = element.next
if element:
self.process_OpenGLAPI(element)
return
def process_OpenGLAPI(self, element):
child = element.children
while child:
if child.type == "element":
if child.name == "category":
self.process_category( child )
elif child.name == "OpenGLAPI":
self.process_OpenGLAPI( child )
child = child.next
return
def process_category(self, cat):
cat_name = cat.nsProp( "name", None )
cat_number = cat.nsProp( "number", None )
[cat_type, key] = classify_category(cat_name, cat_number)
self.categories[cat_type][key] = [cat_name, cat_number]
child = cat.children
while child:
if child.type == "element":
if child.name == "function":
func_name = real_function_name( child )
temp_name = child.nsProp( "name", None )
self.category_dict[ temp_name ] = [cat_name, cat_number]
if self.functions_by_name.has_key( func_name ):
func = self.functions_by_name[ func_name ]
func.process_element( child )
else:
func = self.factory.create_item( "function", child, self )
self.functions_by_name[ func_name ] = func
if func.offset >= self.next_offset:
self.next_offset = func.offset + 1
elif child.name == "enum":
enum = self.factory.create_item( "enum", child, self )
self.enums_by_name[ enum.name ] = enum
elif child.name == "type":
t = self.factory.create_item( "type", child, self )
self.types_by_name[ "GL" + t.name ] = t
child = child.next
return
def functionIterateByCategory(self, cat = None):
"""Iterate over functions by category.
If cat is None, all known functions are iterated in category
order. See classify_category for details of the ordering.
Within a category, functions are sorted by name. If cat is
not None, then only functions in that category are iterated.
"""
lists = [{}, {}, {}, {}]
for func in self.functionIterateAll():
[cat_name, cat_number] = self.category_dict[func.name]
if (cat == None) or (cat == cat_name):
[func_cat_type, key] = classify_category(cat_name, cat_number)
if not lists[func_cat_type].has_key(key):
lists[func_cat_type][key] = {}
lists[func_cat_type][key][func.name] = func
functions = []
for func_cat_type in range(0,4):
keys = lists[func_cat_type].keys()
keys.sort()
for key in keys:
names = lists[func_cat_type][key].keys()
names.sort()
for name in names:
functions.append(lists[func_cat_type][key][name])
return functions.__iter__()
def functionIterateByOffset(self):
max_offset = -1
for func in self.functions_by_name.itervalues():
if func.offset > max_offset:
max_offset = func.offset
temp = [None for i in range(0, max_offset + 1)]
for func in self.functions_by_name.itervalues():
if func.offset != -1:
temp[ func.offset ] = func
list = []
for i in range(0, max_offset + 1):
if temp[i]:
list.append(temp[i])
return list.__iter__();
def functionIterateAll(self):
return self.functions_by_name.itervalues()
def enumIterateByName(self):
keys = self.enums_by_name.keys()
keys.sort()
list = []
for enum in keys:
list.append( self.enums_by_name[ enum ] )
return list.__iter__()
def categoryIterate(self):
"""Iterate over categories.
Iterate over all known categories in the order specified by
classify_category. Each iterated value is a tuple of the
name and number (which may be None) of the category.
"""
list = []
for cat_type in range(0,4):
keys = self.categories[cat_type].keys()
keys.sort()
for key in keys:
list.append(self.categories[cat_type][key])
return list.__iter__()
def get_category_for_name( self, name ):
if self.category_dict.has_key(name):
return self.category_dict[name]
else:
return ["<unknown category>", None]
def typeIterate(self):
return self.types_by_name.itervalues()
def find_type( self, type_name ):
if type_name in self.types_by_name:
return self.types_by_name[ type_name ].type_expr
else:
print "Unable to find base type matching \"%s\"." % (type_name)
return None
| bsd-3-clause | -8,889,399,866,411,980,000 | 24.589452 | 210 | 0.666114 | false | 3.14342 | false | false | false |
PixelStereo/pybush | pybush/state.py | 1 | 1455 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
#
"""
A State is a copy of a Value at a certain state
"""
from pybush.value import Value
from pybush.basic import Basic
from pybush.constants import __dbug__
from pybush.functions import set_attributes
class State(Value, Basic):
"""
A State is afrozen state of a param
"""
def __init__(self, **kwargs):
super(State, self).__init__(**kwargs)
if __dbug__:
print('creating a state')
set_attributes(self, kwargs)
def __repr__(self):
printer = 'State(name:{name}, '\
'description:{description}, '\
'tags:{tags}, '\
'raw:{raw}, '\
'value:{value}, '\
'datatype:{datatype}, '\
'domain:{domain}, '\
'clipmode:{clipmode}, '\
'unique:{unique})'
return printer.format( name=self.name,\
description=self.description, \
tags=self.tags,\
raw=self.raw,\
value=self.value, \
datatype=self.datatype,
domain=self.domain, \
clipmode=self.clipmode, \
unique=self.unique)
| gpl-3.0 | -7,981,826,928,319,378,000 | 32.837209 | 63 | 0.42268 | false | 4.89899 | false | false | false |
kurrik/laulik | src/server/server.py | 1 | 2251 | from ansi2html import Ansi2HTMLConverter
import argparse
from flask import Flask
from flask import Markup
from flask import Response
from flask import make_response
from flask import render_template
from flask import request
from flask import send_file
import github
import laulik
import os
app = Flask(__name__)
repopath = os.environ.get('REPOPATH')
app.logger.info("Repo path: ", repopath)
laulik_api = laulik.API(repopath=repopath)
github_api = github.API(repopath=repopath)
conv = Ansi2HTMLConverter(markup_lines=True)
@app.route('/', methods=['GET', 'POST'])
def root():
data = {}
data['git_info'] = github_api.info()
data['server_version'] = laulik_api.server_version()
if request.method == 'POST':
key = request.form['key']
result = laulik_api.build(key)
data['msg'] = Markup('Built project <strong>{0}</strong>'.format(key))
data['output'] = Markup(conv.convert(result.stdout, full=False))
data['projects'] = laulik_api.projects()
return render_template('index.html', **data)
@app.route('/build/<key>/<version>.pdf')
def pdf(key, version):
meta = laulik_api.safe_get_meta(key, version)
if meta is None:
return 'Not found!', 404
return send_file(
meta.paths.pdf,
mimetype='application/pdf',
as_attachment=True,
attachment_filename='{0}-{1}.pdf'.format(meta.key, meta.version))
@app.route('/build/<key>/<version>.tex')
def tex(key, version):
meta = laulik_api.safe_get_meta(key, version)
if meta is None:
return 'Not found!', 404
return send_file(
meta.paths.latex,
mimetype='text/plain',
as_attachment=True,
attachment_filename='{0}-{1}.tex'.format(meta.key, meta.version))
@app.route('/webhook', methods=['POST'])
def webhook():
data = {}
if request.headers.get('X-GitHub-Event') == 'push':
data['req'] = github_api.parse_webhook(request.get_json(force=True))
result = github_api.pull()
data['stdout'] = result.stdout
data['stderr'] = result.stderr
data['action'] = 'Pulled git repo'
resp = make_response(render_template('webhook.txt', **data), 200)
resp.headers['Content-Type'] = 'text/plain'
return resp
if __name__ == '__main__':
app.run(host='0.0.0.0', debug=True, port=int(os.environ.get('PORT', 8080)))
| apache-2.0 | 6,871,222,778,806,686,000 | 31.157143 | 77 | 0.676588 | false | 3.248196 | false | false | false |
gsamokovarov/frames.py | frames/__init__.py | 1 | 5858 | '''
__
/ _|_ __ __ _ _ __ ___ ___ ___ _ __ _ _
| |_| '__/ _` | '_ ` _ \ / _ \/ __| | '_ \| | | |
| _| | | (_| | | | | | | __/\__ \_| |_) | |_| |
|_| |_| \__,_|_| |_| |_|\___||___(_) .__/ \__, |
|_| |__/
'''
__all__ = [
'FrameNotFound', 'FrameType', 'Frame', 'current_frame', 'locate_frame'
]
import sys
NATIVE = hasattr(sys, '_getframe')
def _getframe(*args, **kw):
# Delegates to the underlying `_getframe.` If we don't do that we lose the
# ability to force `NATIVE` to `False`, after importing the module.
from .compat import _getframe
return _getframe(*args, **kw).f_back
# Make classes new-style by default.
__metaclass__ = type
class Frame:
'''
Wrapper object for the internal frames.
'''
class NotFound(LookupError):
'''
Raised when no frame is found.
'''
Type = sys._getframe().__class__
@staticmethod
def current_frame(raw=False):
'''
Gives the current execution frame.
:returns:
The current execution frame that is actually executing this.
'''
# `import sys` is important here, because the `sys` module is special
# and we will end up with the class frame instead of the `current` one.
if NATIVE:
import sys
frame = sys._getframe()
else:
frame = _getframe()
frame = frame.f_back
if not raw:
frame = Frame(frame)
return frame
@staticmethod
def locate(callback, root_frame=None, include_root=False, raw=False):
'''
Locates a frame by criteria.
:param callback:
One argument function to check the frame against. The frame we are
curretly on, is given as that argument.
:param root_frame:
The root frame to start the search from. Can be a callback taking
no arguments.
:param include_root:
`True` if the search should start from the `root_frame` or the one
beneath it. Defaults to `False`.
:param raw:
whether to use raw frames or wrap them in our own object. Defaults to
`False`.
:raises RuntimeError:
When no matching frame is found.
:returns:
The first frame which responds to the `callback`.
'''
def get_from(maybe_callable):
if callable(maybe_callable):
return maybe_callable()
return maybe_callable
# Creates new frames, whether raw or not.
new = lambda frame: frame if raw else Frame(frame)
current_frame = get_from(root_frame or Frame.current_frame(raw=True))
current_frame = new(current_frame)
if not include_root:
current_frame = new(current_frame.f_back)
# The search will stop, because at some point the frame will be falsy.
while current_frame:
found = callback(current_frame)
if found:
return current_frame
current_frame = new(current_frame.f_back)
raise Frame.NotFound('No matching frame found')
def __init__(self, frame):
'''
Wraps the raw frame object.
:param frame:
The frame object to wrap.
'''
self.frame = frame
if not frame:
return
# Read-only attributes go below.
#: Shortcut for `f_back`
self.back = frame.f_back
#: Shortcut for `f_builtins`
self.builtins = frame.f_builtins
#: Shortcut for `f_code`
self.code = frame.f_code
#: Shortcut for `f_globals`
self.globals = frame.f_globals
#: Shortcut for `f_locals`.
self.locals = frame.f_locals
#: Shortcut for `f_restricted`.
self.restricted = frame.f_restricted
# Special attributes are defined as properties.
@property
def exc_traceback(self):
'''
Shortcut for `f_exc_traceback`.
:returns:
The frame exception traceback, if any.
'''
return self.frame.f_exc_traceback
@property
def exc_type(self):
'''
Shortcut for `f_exc_type`.
:returns:
The frame exception class, if any.
'''
return self.frame.f_exc_type
@property
def exc_value(self):
'''
Shortcut for `f_exc_value`.
:returns:
The frame exception instance, if any.
'''
return self.frame.f_exc_value
@property
def last_instruction(self):
'''
Shortcut for `f_lasti`
:returns:
The last frame instruction.
'''
return self.frame.f_lasti
@property
def lineno(self):
'''
Shortcut for `f_lineno`.
:returns:
The line of the code at the current frame.
'''
return self.frame.f_lineno - 1
@property
def trace(self):
'''
Shortcut for `f_trace`.
:returns:
The trace function, if any.
'''
return self.frame.f_trace
@property
def __class__(self):
# Make us look like a regular frame in front of `isinstance`.
return Frame.Type
def __getattr__(self, name):
# Proxy some methods back to the raw frame object.
if not hasattr(self.frame, name):
raise AttributeError(name)
return getattr(self.frame, name)
def __bool__(self):
return True if self.frame else False
__nonzero__ = __bool__
# More standard, non classy Python interface.
FrameNotFound = Frame.NotFound
FrameType = Frame.Type
locate_frame = Frame.locate
current_frame = Frame.current_frame
| mit | -8,642,424,317,072,030,000 | 22.813008 | 81 | 0.534483 | false | 4.169395 | false | false | false |
hamukichi/ironpycompiler | ironpycompiler/exceptions.py | 1 | 1978 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" This module contains execptions of IronPyCompiler.
"""
class IPCError(Exception):
"""This is the base class for exceptions in this module.
"""
pass
class IronPythonDetectionError(IPCError):
"""This exception will be raised if IronPython cannot be found in your system.
:param str executable: (optional) The name of the IronPython
executable looked for. This argument remains
for backward compatibility.
:param msg: (optional) The detailed information of the error.
.. versionchanged:: 0.9.0
The argument ``executable`` became optional, and ``msg`` was added.
"""
def __init__(self, executable=None, msg=None):
self.executable = executable
self.msg = msg
def __str__(self):
if self.executable is not None:
return "IronPython (%s) cannot be found." % str(self.executable)
elif self.msg is not None:
return str(self.msg)
else:
return "IronPython cannot be found."
class ModuleCompilationError(IPCError):
"""This exception means an error during compilation.
:param msg: (optional) The detailed information of the error.
.. versionadded:: 0.10.0
"""
def __init__(self, msg=None):
self.msg = msg
def __str__(self):
if self.msg is not None:
return str(self.msg)
else:
return "An error occurred during compilation."
class IronPythonValidationError(IPCError):
"""Raised if the specified executable is not a valid IronPython executable.
:param msg: (optional) The detailed information of the error.
.. versionadded:: 1.0.0
"""
def __init__(self, msg=None):
self.msg = msg
def __str__(self):
if self.msg is not None:
return str(self.msg)
else:
return "Not a valid IronPython executable."
| mit | -8,640,078,560,626,378,000 | 23.419753 | 82 | 0.609707 | false | 4.262931 | false | false | false |
ZombieNinjaPirate/pypkg | HonSSH/GEO.py | 1 | 1855 | """
Copyright (c) 2014, Are Hansen - Honeypot Development.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted
provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions
and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions
and the following disclaimer in the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND AN EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
__author__ = 'Are Hansen'
__date__ = '2014, July 25'
__version__ = '0.0.1'
import GeoIP
def accessCC(item_list):
"""Preforms a geoip lookup insertion directly from the daily log lines, appends the results to
the output list and returned. """
gip = GeoIP.new(GeoIP.GEOIP_MEMORY_CACHE)
output = []
for item in item_list:
item = item.split(' ')
geo = gip.country_code_by_addr(item[2])
out = item[0], item[1], item[3], item[4], item[2], geo
output.append(out)
return output | gpl-3.0 | 809,056,899,135,910,100 | 38.489362 | 100 | 0.750943 | false | 4.177928 | false | false | false |
jawilson/home-assistant | homeassistant/components/notify/aws_lambda.py | 2 | 2919 | """
AWS Lambda platform for notify component.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/notify.aws_lambda/
"""
import logging
import json
import base64
import voluptuous as vol
from homeassistant.const import (
CONF_PLATFORM, CONF_NAME)
from homeassistant.components.notify import (
ATTR_TARGET, PLATFORM_SCHEMA, BaseNotificationService)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
REQUIREMENTS = ["boto3==1.3.1"]
CONF_REGION = 'region_name'
CONF_ACCESS_KEY_ID = 'aws_access_key_id'
CONF_SECRET_ACCESS_KEY = 'aws_secret_access_key'
CONF_PROFILE_NAME = 'profile_name'
CONF_CONTEXT = 'context'
ATTR_CREDENTIALS = 'credentials'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_REGION, default="us-east-1"): cv.string,
vol.Inclusive(CONF_ACCESS_KEY_ID, ATTR_CREDENTIALS): cv.string,
vol.Inclusive(CONF_SECRET_ACCESS_KEY, ATTR_CREDENTIALS): cv.string,
vol.Exclusive(CONF_PROFILE_NAME, ATTR_CREDENTIALS): cv.string,
vol.Optional(CONF_CONTEXT, default=dict()): vol.Coerce(dict)
})
def get_service(hass, config):
"""Get the AWS Lambda notification service."""
context_str = json.dumps({'hass': hass.config.as_dict(),
'custom': config[CONF_CONTEXT]})
context_b64 = base64.b64encode(context_str.encode("utf-8"))
context = context_b64.decode("utf-8")
# pylint: disable=import-error
import boto3
aws_config = config.copy()
del aws_config[CONF_PLATFORM]
del aws_config[CONF_NAME]
del aws_config[CONF_CONTEXT]
profile = aws_config.get(CONF_PROFILE_NAME)
if profile is not None:
boto3.setup_default_session(profile_name=profile)
del aws_config[CONF_PROFILE_NAME]
lambda_client = boto3.client("lambda", **aws_config)
return AWSLambda(lambda_client, context)
# pylint: disable=too-few-public-methods
class AWSLambda(BaseNotificationService):
"""Implement the notification service for the AWS Lambda service."""
def __init__(self, lambda_client, context):
"""Initialize the service."""
self.client = lambda_client
self.context = context
def send_message(self, message="", **kwargs):
"""Send notification to specified LAMBDA ARN."""
targets = kwargs.get(ATTR_TARGET)
if not targets:
_LOGGER.info("At least 1 target is required")
return
if not isinstance(targets, list):
targets = [targets]
for target in targets:
cleaned_kwargs = dict((k, v) for k, v in kwargs.items() if v)
payload = {"message": message}
payload.update(cleaned_kwargs)
self.client.invoke(FunctionName=target,
Payload=json.dumps(payload),
ClientContext=self.context)
| mit | -5,085,784,450,656,886,000 | 30.728261 | 74 | 0.666324 | false | 3.781088 | true | false | false |
npinto/Oger | Oger/examples/cma_es.py | 1 | 2538 | import Oger
import scipy as sp
import time
import mdp.parallel
if __name__ == '__main__':
''' Example of using CMA_ES to optimize the parameters of a reservoir+readout on the NRMSE for NARMA30, once sequentially and once in parallel if the machine is multicore.
The CMA-ES is given an initial value x0 and standard devation for each of the parameters.
'''
input_size = 1
inputs, outputs = Oger.datasets.narma30()
data = [[], zip(inputs, outputs)]
# construct individual nodes
reservoir = Oger.nodes.ReservoirNode(input_size, 100)
readout = Oger.nodes.RidgeRegressionNode()
# build network with MDP framework
flow = mdp.Flow([reservoir, readout])
# Nested dictionary
# For cma_es, each parameter 'range' consists of an initial value and a standard deviation
# For input_scaling, x0=.3 and std = .5
# For spectral_radius, x0 = .9 and std = .5
gridsearch_parameters = {reservoir:{'input_scaling': mdp.numx.array([0.3, .5]), 'spectral_radius':mdp.numx.array([.9, .5])}}
# Instantiate an optimizer
opt = Oger.evaluation.Optimizer(gridsearch_parameters, Oger.utils.nrmse)
# # Additional options to be passed to the CMA-ES algorithm. We impose a lower bound on the input_scaling such that values of zero
# # do not occur (this causes an error in the training of the readout because the reservoir output is all zeros).
options = {'maxiter':20, 'bounds':[0.01, None]}
# Do the optimization
print 'Parallel execution...'
# Instantiate a new optimizer, otherwise CMA_ES doesn't
opt = Oger.evaluation.Optimizer(gridsearch_parameters, Oger.utils.nrmse)
opt.scheduler = mdp.parallel.ProcessScheduler(n_processes=2)
#opt.scheduler = Oger.parallel.GridScheduler()
mdp.activate_extension("parallel")
start_time = time.time()
opt.cma_es(data, flow, cross_validate_function=Oger.evaluation.n_fold_random, n_folds=5, options=options)
par_duration = int(time.time() - start_time)
print 'Duration: ' + str(par_duration) + 's'
# Get the optimal flow and run cross-validation with it
opt_flow = opt.get_optimal_flow()
print 'Performing cross-validation with the optimal flow. Note that this result can differ slightly from the one above because of different choices of randomization of the folds.'
errors = Oger.evaluation.validate(data, opt_flow, Oger.utils.nrmse, cross_validate_function=Oger.evaluation.n_fold_random, n_folds=5, progress=False)
print 'Mean error over folds: ' + str(sp.mean(errors))
| gpl-3.0 | 5,014,727,968,097,656,000 | 46.886792 | 183 | 0.710402 | false | 3.520111 | false | false | false |
opencog/destin | Destin/Bindings/Python/czt_mod.py | 2 | 2158 | # -*- coding: utf-8 -*-
"""
Created on Wed May 8 12:49:09 2013
@author: teaera
"""
import os
import cv2.cv as cv
import pydestin as pd
#cl = pd.czt_lib()
cm = pd.CztMod()
#############################################################################
"""
Save the current user's home folder.
"""
homeFld = os.getenv("HOME")
if not homeFld:
homeFld = os.getenv("USERPROFILE")
"""
Display centroids images!
"""
def dcis(network, layer):
network.displayLayerCentroidImages(layer,1000)
cv.WaitKey(100)
"""
Save centroids images!
"""
def saveCens(network, layer, saveLoc):
network.saveLayerCentroidImages(layer, saveLoc)
"""
Load images in one folder into an 'ims'!!!
"""
def load_ims_fld(ims, fld):
if not fld.endswith("/"):
fld += "/"
for each in os.listdir(fld):
ims.addImage(fld + each)
"""
Used to init DeSTIN, but compatible by setting 'extRatio'!
"""
def init_destin(siw=pd.W512, nLayer=8, centroids=[4,8,16,32,64,32,16,8],
isUniform=True, imageMode=pd.DST_IMG_MODE_GRAYSCALE):
temp_network = pd.DestinNetworkAlt(siw, nLayer, centroids, isUniform, imageMode)
#temp_network.setBeliefTransform(pd.DST_BT_NONE)
return temp_network
"""
Use the existing network and ims to train!
Default number is 16,000.
"""
def train_ims(network, ims, maxCount=16000):
for i in range(maxCount):
if i % 10 == 0:
print "Iteration " + str(i)
ims.findNextImage()
f = ims.getGrayImageFloat()
network.doDestin(f)
"""
Use one folder as input, and use another folder as additional info!
"""
def train_2flds(network, fld1, fld2, repeatCount=1600):
if not fld1.endswith("/"):
fld1 += "/"
if not fld2.endswith("/"):
fld2 += "/"
for i in range(repeatCount):
if i % 10 == 0:
print "RepeatTime: " + str(i)
for each in os.listdir(fld1):
f = cl.combineImgs(fld1+each, fld2+each)
network.doDestin(f)
"""
Get the time stamp for today
"""
import datetime
def getTimeStamp():
now = datetime.datetime.now()
return str(now.year) + "." + str(now.month) + "." + str(now.day)
| lgpl-3.0 | -3,027,028,483,499,630,600 | 23.247191 | 84 | 0.601483 | false | 3.141194 | false | false | false |
ahri/flask-snooze | flask_snooze.py | 1 | 9579 | # coding: utf-8
"""
Snooze: a backend-agnostic REST API provider for Flask.
e.g.
from flask import app, Blueprint
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.snooze import Snooze, SqlAlchemyEndpoint
from my_model import sqlalchemy_db, Book
api = Blueprint('api_v1', __name__)
apimgr = Snooze(api)
apimgr.add(SqlAlchemyEndpoint(sqlalchemy_db, Book, ['author', 'title']))
app.register_blueprint(api, url_prefix='/api_v1')
"""
from flask import request, make_response
import re
try:
import simplejson as json
except ImportError:
import json
class NotFoundError(Exception):
"""
Resource not found.
"""
def __init__(self, cls, path):
super(NotFoundError, self).__init__()
self.cls = cls
self.path = path
self.message = 'No %(cls)s exists with an ID of %(path)s' % dict(
cls=cls.__name__,
path=path
)
def error_dict(etype, message, **kwargs):
d = dict(type=etype, message=message)
if kwargs:
d['detail'] = kwargs
return d
class CoerceToDictEncoder(json.JSONEncoder):
"""
A fairly naive encoder that will try to convert unrecognised types to dict.
The idea being that objects can be made iterable quite easily as a bridge
to being converted to JSON.
"""
def default(self, obj):
if obj is None or type(obj) in (
dict,
list, tuple,
str, unicode,
int, long, float,
bool):
return json.JSONEncoder.default(self, obj)
return dict(obj)
def wrap_verb_call(call, endpoint, data_in, data_out):
"""
Construct a callback that will wrap a given HTTP Verb call, passing a path.
"""
def f(path=None):
data = data_in(request.data) if request.data != '' else dict()
assert isinstance(data, dict), "Data must be a dict"
try:
res = call(endpoint, path, data)
try:
# NB. error_data used because Flask stringifies stuff we put
# into res.data, which isn't good for us
res.data = data_out(res.error_data)
except AttributeError:
try:
res.data = data_out(res.data)
except AttributeError:
res = data_out(res)
except NotFoundError, e:
res = make_response()
res.status = '404'
res.data = data_out(error_dict(**{
'etype': type(e).__name__,
'message': e.message,
'class': e.cls.__name__,
'path': e.path
}))
except:
import sys
from traceback import extract_tb
exc_type, exc_value, exc_traceback = sys.exc_info()
res = data_out(error_dict(exc_type.__name__,
exc_value.message,
traceback=extract_tb(exc_traceback))), '500'
return res
return f
def response_redirect(endpoint, o, code):
r = make_response()
r.headers['Location'] = '%(path)s%(id)s' % dict(
path=re.sub('[^/]*$', '', request.path),
id=getattr(o, endpoint.id_key)
)
r.status = str(code)
return r
class Snooze(object):
"""
The API context manager,
The api level means:
every verb takes in and gives out data in the same ways
"""
def __init__(self, app, hooks=None):
self._app = app
hooks = dict() if hooks is None else hooks
self._hook_data_in = hooks.get('data_in', json.loads)
self._hook_data_out = hooks.get('data_out', CoerceToDictEncoder().encode)
self._routes = {}
def add(self, endpoint, name=None, methods=(
'OPTIONS', 'POST', 'GET', 'PUT', 'PATCH', 'DELETE')):
"""
Add an endpoint for a class, the name defaults to a lowercase version
of the class name but can be overriden.
Methods can be specified, note that HEAD is automatically generated by
Flask to execute the GET method without returning a body.
"""
obj_name = endpoint.cls.__name__.lower() if name is None else name
methods = [m.upper() for m in methods]
for verb in 'OPTIONS', 'POST', 'GET', 'PUT', 'PATCH', 'DELETE':
if verb not in methods:
continue
l = wrap_verb_call(call=getattr(self, '_%s' % verb.lower()),
endpoint=endpoint,
data_in=self._hook_data_in,
data_out=self._hook_data_out)
self._register(obj_name=obj_name,
verb=verb,
func=l)
#
# Verbs
#
def _options(self, endpoint, path, data):
"""HTTP Verb endpoint"""
return self._routes
def _post(self, endpoint, path, data):
"""HTTP Verb endpoint"""
o = endpoint.create(path)
if data is not None:
self._fill(endpoint, o, data)
return response_redirect(endpoint, o, 201)
def _get(self, endpoint, path, data):
"""HTTP Verb endpoint"""
return endpoint.read(path)
def _put(self, endpoint, path, data):
"""HTTP Verb endpoint"""
created = False
try:
o = endpoint.read(path)
except NotFoundError:
o = endpoint.create(path)
created = True
self._fill(endpoint, o, data)
if created:
return response_redirect(endpoint, o, 201)
def _patch(self, endpoint, path, data):
"""HTTP Verb endpoint"""
o = endpoint.read(path)
self._update(endpoint, o, data)
def _delete(self, endpoint, path, data):
"""HTTP Verb endpoint"""
endpoint.delete(path)
#
# Tools
#
def _update(self, endpoint, o, data):
for k in data:
assert k in endpoint.writeable_keys, \
"Cannot update key %s, valid keys for update: %s" % \
(k, ', '.join(endpoint.writeable_keys))
setattr(o, k, data[k])
endpoint.finalize(o)
def _fill(self, endpoint, o, data):
items_set = set(endpoint.writeable_keys)
keys_set = set(data.keys())
assert items_set == keys_set, \
"The provided keys (%s) do not match the expected items (%s)" % \
(', '.join(keys_set), ', '.join(items_set))
self._update(endpoint, o, data)
def _register(self, obj_name, verb, func):
func.provide_automatic_options = False
route = '/%s/<path:path>' % obj_name
self._app.route(route,
methods=(verb,),
endpoint="%s:%s" % (verb, route))(func)
self._reg_options(verb, route)
if verb in ('OPTIONS', 'GET', 'POST'):
route = '/%s/' % obj_name
self._app.route(route,
methods=(verb,),
endpoint="%s:%s" % (verb, route),
defaults={'path': None})(func)
self._reg_options(verb, route)
def _reg_options(self, verb, route):
verbs = self._routes.get(route, [])
verbs.append(verb)
if verb == 'GET':
# Flask adds 'HEAD' for GET
verbs.append('HEAD')
self._routes[route] = verbs
class Endpoint(object):
"""
Base Endpoint object.
"""
def __init__(self, cls, id_key, writeable_keys):
"""
cls: Class of object being represented by this endpoint
id_key: Identifying key of an object
writeable_keys: A list of keys that may be written to on an object
"""
self.cls = cls
self.id_key = id_key
self.writeable_keys = writeable_keys
def create(self, path=None):
"""Create a new object"""
raise NotImplementedError()
def read(self, path):
"""Load an existing object"""
raise NotImplementedError()
def finalize(self, obj):
"""Save an object (if required)"""
raise NotImplementedError()
def delete(self, path):
"""Delete the data for the provided ID"""
raise NotImplementedError()
#
# SQLAlchemy Land
#
def row2dict(row):
"""
Convert a SQLAlchemy row/object to a dict, found on:
http://stackoverflow.com/questions/
1958219/convert-sqlalchemy-row-object-to-python-dict
"""
d = {}
for col_name in row.__table__.columns.keys():
d[col_name] = getattr(row, col_name)
return d
class SqlAlchemyEndpoint(Endpoint):
def __init__(self, db, cls, items):
from sqlalchemy.orm import class_mapper
self.db = db
self.pk = class_mapper(cls).primary_key[0]
super(SqlAlchemyEndpoint, self).__init__(cls, self.pk.name, items)
def create(self, path=None):
o = self.cls()
if path is not None:
setattr(o, self.id_key, path)
return o
def read(self, path):
if path == None:
return [pk[0] for pk in \
self.db.session.query(self.pk).all()]
try:
return self.cls.query.filter(self.pk == path).all()[0]
except IndexError:
raise NotFoundError(self.cls, path)
def finalize(self, obj):
self.db.session.add(obj)
self.db.session.commit()
def delete(self, path):
o = self.read(path)
self.db.session.delete(o)
| mit | 4,601,263,358,545,292,300 | 27.679641 | 81 | 0.540766 | false | 3.981297 | false | false | false |
Weasyl/weasyl | weasyl/controllers/messages.py | 1 | 2514 | import itertools
from pyramid.httpexceptions import HTTPSeeOther
from pyramid.response import Response
from weasyl import define, message
from weasyl.controllers.decorators import login_required, token_checked
"""Contains view callables dealing with notification messages."""
@login_required
@token_checked
def messages_remove_(request):
form = request.web_input(recall='', remove=[])
remove_all_before = form.get('remove-all-before')
if remove_all_before:
message.remove_all_before(request.userid, int(remove_all_before))
elif form.get('remove-all-submissions'):
message.remove_all_submissions(request.userid, define.get_int(form['remove-all-submissions']))
else:
message.remove(request.userid, list(map(int, form.remove)))
if form.recall:
raise HTTPSeeOther(location="/messages/submissions")
else:
raise HTTPSeeOther(location="/messages/notifications")
def tag_section(results, section):
for row in results:
row['section'] = section
return results
def sort_notifications(notifications):
return [
row
for key, group in itertools.groupby(
notifications, lambda row: message.notification_clusters.get(row['type']))
for row in sorted(group, key=lambda row: row['unixtime'], reverse=True)
]
@login_required
def messages_notifications_(request):
""" todo finish listing of message types in the template """
notifications = (
tag_section(message.select_site_updates(request.userid), 'notifications') +
tag_section(message.select_comments(request.userid), 'comments') +
tag_section(message.select_notifications(request.userid), 'notifications') +
tag_section(message.select_journals(request.userid), 'journals')
)
define._page_header_info.refresh(request.userid)
return Response(define.webpage(request.userid, "message/notifications.html", [
sort_notifications(notifications),
]))
@login_required
def messages_submissions_(request):
form = request.web_input(feature="", backtime=None, nexttime=None)
define._page_header_info.refresh(request.userid)
return Response(define.webpage(request.userid, "message/submissions_thumbnails.html", [
# Feature
form.feature,
# Submissions
message.select_submissions(request.userid, 66, include_tags=False,
backtime=define.get_int(form.backtime), nexttime=define.get_int(form.nexttime)),
]))
| apache-2.0 | 1,569,868,763,633,523,200 | 32.972973 | 115 | 0.696897 | false | 4.081169 | false | false | false |
dfang/odoo | addons/product/models/product_pricelist.py | 6 | 23258 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from itertools import chain
from odoo import api, fields, models, tools, _
from odoo.exceptions import UserError, ValidationError
import odoo.addons.decimal_precision as dp
class Pricelist(models.Model):
_name = "product.pricelist"
_description = "Pricelist"
_order = "sequence asc, id desc"
def _get_default_currency_id(self):
return self.env.user.company_id.currency_id.id
def _get_default_item_ids(self):
ProductPricelistItem = self.env['product.pricelist.item']
vals = ProductPricelistItem.default_get(ProductPricelistItem._fields.keys())
vals.update(compute_price='formula')
return [[0, False, vals]]
name = fields.Char('Pricelist Name', required=True, translate=True)
active = fields.Boolean('Active', default=True, help="If unchecked, it will allow you to hide the pricelist without removing it.")
item_ids = fields.One2many(
'product.pricelist.item', 'pricelist_id', 'Pricelist Items',
copy=True, default=_get_default_item_ids)
currency_id = fields.Many2one('res.currency', 'Currency', default=_get_default_currency_id, required=True)
company_id = fields.Many2one('res.company', 'Company')
sequence = fields.Integer(default=16)
country_group_ids = fields.Many2many('res.country.group', 'res_country_group_pricelist_rel',
'pricelist_id', 'res_country_group_id', string='Country Groups')
@api.multi
def name_get(self):
return [(pricelist.id, '%s (%s)' % (pricelist.name, pricelist.currency_id.name)) for pricelist in self]
@api.model
def name_search(self, name, args=None, operator='ilike', limit=100):
if name and operator == '=' and not args:
# search on the name of the pricelist and its currency, opposite of name_get(),
# Used by the magic context filter in the product search view.
query_args = {'name': name, 'limit': limit, 'lang': self._context.get('lang', 'en_US')}
query = """SELECT p.id
FROM ((
SELECT pr.id, pr.name
FROM product_pricelist pr JOIN
res_currency cur ON
(pr.currency_id = cur.id)
WHERE pr.name || ' (' || cur.name || ')' = %(name)s
)
UNION (
SELECT tr.res_id as id, tr.value as name
FROM ir_translation tr JOIN
product_pricelist pr ON (
pr.id = tr.res_id AND
tr.type = 'model' AND
tr.name = 'product.pricelist,name' AND
tr.lang = %(lang)s
) JOIN
res_currency cur ON
(pr.currency_id = cur.id)
WHERE tr.value || ' (' || cur.name || ')' = %(name)s
)
) p
ORDER BY p.name"""
if limit:
query += " LIMIT %(limit)s"
self._cr.execute(query, query_args)
ids = [r[0] for r in self._cr.fetchall()]
# regular search() to apply ACLs - may limit results below limit in some cases
pricelists = self.search([('id', 'in', ids)], limit=limit)
if pricelists:
return pricelists.name_get()
return super(Pricelist, self).name_search(name, args, operator=operator, limit=limit)
def _compute_price_rule_multi(self, products_qty_partner, date=False, uom_id=False):
""" Low-level method - Multi pricelist, multi products
Returns: dict{product_id: dict{pricelist_id: (price, suitable_rule)} }"""
if not self.ids:
pricelists = self.search([])
else:
pricelists = self
results = {}
for pricelist in pricelists:
subres = pricelist._compute_price_rule(products_qty_partner, date=date, uom_id=uom_id)
for product_id, price in subres.items():
results.setdefault(product_id, {})
results[product_id][pricelist.id] = price
return results
@api.multi
def _compute_price_rule(self, products_qty_partner, date=False, uom_id=False):
""" Low-level method - Mono pricelist, multi products
Returns: dict{product_id: (price, suitable_rule) for the given pricelist}
If date in context: Date of the pricelist (%Y-%m-%d)
:param products_qty_partner: list of typles products, quantity, partner
:param datetime date: validity date
:param ID uom_id: intermediate unit of measure
"""
self.ensure_one()
if not date:
date = self._context.get('date', fields.Date.today())
if not uom_id and self._context.get('uom'):
uom_id = self._context['uom']
if uom_id:
# rebrowse with uom if given
product_ids = [item[0].id for item in products_qty_partner]
products = self.env['product.product'].with_context(uom=uom_id).browse(product_ids)
products_qty_partner = [(products[index], data_struct[1], data_struct[2]) for index, data_struct in enumerate(products_qty_partner)]
else:
products = [item[0] for item in products_qty_partner]
if not products:
return {}
categ_ids = {}
for p in products:
categ = p.categ_id
while categ:
categ_ids[categ.id] = True
categ = categ.parent_id
categ_ids = categ_ids.keys()
is_product_template = products[0]._name == "product.template"
if is_product_template:
prod_tmpl_ids = [tmpl.id for tmpl in products]
# all variants of all products
prod_ids = [p.id for p in
list(chain.from_iterable([t.product_variant_ids for t in products]))]
else:
prod_ids = [product.id for product in products]
prod_tmpl_ids = [product.product_tmpl_id.id for product in products]
# Load all rules
self._cr.execute(
'SELECT item.id '
'FROM product_pricelist_item AS item '
'LEFT JOIN product_category AS categ '
'ON item.categ_id = categ.id '
'WHERE (item.product_tmpl_id IS NULL OR item.product_tmpl_id = any(%s))'
'AND (item.product_id IS NULL OR item.product_id = any(%s))'
'AND (item.categ_id IS NULL OR item.categ_id = any(%s)) '
'AND (item.pricelist_id = %s) '
'AND (item.date_start IS NULL OR item.date_start<=%s) '
'AND (item.date_end IS NULL OR item.date_end>=%s)'
'ORDER BY item.applied_on, item.min_quantity desc, categ.parent_left desc',
(prod_tmpl_ids, prod_ids, categ_ids, self.id, date, date))
item_ids = [x[0] for x in self._cr.fetchall()]
items = self.env['product.pricelist.item'].browse(item_ids)
results = {}
for product, qty, partner in products_qty_partner:
results[product.id] = 0.0
suitable_rule = False
# Final unit price is computed according to `qty` in the `qty_uom_id` UoM.
# An intermediary unit price may be computed according to a different UoM, in
# which case the price_uom_id contains that UoM.
# The final price will be converted to match `qty_uom_id`.
qty_uom_id = self._context.get('uom') or product.uom_id.id
price_uom_id = product.uom_id.id
qty_in_product_uom = qty
if qty_uom_id != product.uom_id.id:
try:
qty_in_product_uom = self.env['product.uom'].browse([self._context['uom']])._compute_quantity(qty, product.uom_id)
except UserError:
# Ignored - incompatible UoM in context, use default product UoM
pass
# if Public user try to access standard price from website sale, need to call price_compute.
# TDE SURPRISE: product can actually be a template
price = product.price_compute('list_price')[product.id]
price_uom = self.env['product.uom'].browse([qty_uom_id])
for rule in items:
if rule.min_quantity and qty_in_product_uom < rule.min_quantity:
continue
if is_product_template:
if rule.product_tmpl_id and product.id != rule.product_tmpl_id.id:
continue
if rule.product_id and not (product.product_variant_count == 1 and product.product_variant_id.id == rule.product_id.id):
# product rule acceptable on template if has only one variant
continue
else:
if rule.product_tmpl_id and product.product_tmpl_id.id != rule.product_tmpl_id.id:
continue
if rule.product_id and product.id != rule.product_id.id:
continue
if rule.categ_id:
cat = product.categ_id
while cat:
if cat.id == rule.categ_id.id:
break
cat = cat.parent_id
if not cat:
continue
if rule.base == 'pricelist' and rule.base_pricelist_id:
price_tmp = rule.base_pricelist_id._compute_price_rule([(product, qty, partner)])[product.id][0] # TDE: 0 = price, 1 = rule
price = rule.base_pricelist_id.currency_id.compute(price_tmp, self.currency_id, round=False)
else:
# if base option is public price take sale price else cost price of product
# price_compute returns the price in the context UoM, i.e. qty_uom_id
price = product.price_compute(rule.base)[product.id]
convert_to_price_uom = (lambda price: product.uom_id._compute_price(price, price_uom))
if price is not False:
if rule.compute_price == 'fixed':
price = convert_to_price_uom(rule.fixed_price)
elif rule.compute_price == 'percentage':
price = (price - (price * (rule.percent_price / 100))) or 0.0
else:
# complete formula
price_limit = price
price = (price - (price * (rule.price_discount / 100))) or 0.0
if rule.price_round:
price = tools.float_round(price, precision_rounding=rule.price_round)
if rule.price_surcharge:
price_surcharge = convert_to_price_uom(rule.price_surcharge)
price += price_surcharge
if rule.price_min_margin:
price_min_margin = convert_to_price_uom(rule.price_min_margin)
price = max(price, price_limit + price_min_margin)
if rule.price_max_margin:
price_max_margin = convert_to_price_uom(rule.price_max_margin)
price = min(price, price_limit + price_max_margin)
suitable_rule = rule
break
# Final price conversion into pricelist currency
if suitable_rule and suitable_rule.compute_price != 'fixed' and suitable_rule.base != 'pricelist':
price = product.currency_id.compute(price, self.currency_id, round=False)
results[product.id] = (price, suitable_rule and suitable_rule.id or False)
return results
# New methods: product based
def get_products_price(self, products, quantities, partners, date=False, uom_id=False):
""" For a given pricelist, return price for products
Returns: dict{product_id: product price}, in the given pricelist """
self.ensure_one()
return dict((product_id, res_tuple[0]) for product_id, res_tuple in self._compute_price_rule(zip(products, quantities, partners), date=date, uom_id=uom_id).iteritems())
def get_product_price(self, product, quantity, partner, date=False, uom_id=False):
""" For a given pricelist, return price for a given product """
self.ensure_one()
return self._compute_price_rule([(product, quantity, partner)], date=date, uom_id=uom_id)[product.id][0]
def get_product_price_rule(self, product, quantity, partner, date=False, uom_id=False):
""" For a given pricelist, return price and rule for a given product """
self.ensure_one()
return self._compute_price_rule([(product, quantity, partner)], date=date, uom_id=uom_id)[product.id]
# Compatibility to remove after v10 - DEPRECATED
@api.model
def _price_rule_get_multi(self, pricelist, products_by_qty_by_partner):
""" Low level method computing the result tuple for a given pricelist and multi products - return tuple """
return pricelist._compute_price_rule(products_by_qty_by_partner)
@api.multi
def price_get(self, prod_id, qty, partner=None):
""" Multi pricelist, mono product - returns price per pricelist """
return dict((key, price[0]) for key, price in self.price_rule_get(prod_id, qty, partner=partner).items())
@api.multi
def price_rule_get_multi(self, products_by_qty_by_partner):
""" Multi pricelist, multi product - return tuple """
return self._compute_price_rule_multi(products_by_qty_by_partner)
@api.multi
def price_rule_get(self, prod_id, qty, partner=None):
""" Multi pricelist, mono product - return tuple """
product = self.env['product.product'].browse([prod_id])
return self._compute_price_rule_multi([(product, qty, partner)])[prod_id]
@api.model
def _price_get_multi(self, pricelist, products_by_qty_by_partner):
""" Mono pricelist, multi product - return price per product """
return pricelist.get_products_price(zip(**products_by_qty_by_partner))
def _get_partner_pricelist(self, partner_id, company_id=None):
""" Retrieve the applicable pricelist for a given partner in a given company.
:param company_id: if passed, used for looking up properties,
instead of current user's company
"""
Partner = self.env['res.partner']
Property = self.env['ir.property'].with_context(force_company=company_id or self.env.user.company_id.id)
p = Partner.browse(partner_id)
pl = Property.get('property_product_pricelist', Partner._name, '%s,%s' % (Partner._name, p.id))
if pl:
pl = pl[0].id
if not pl:
if p.country_id.code:
pls = self.env['product.pricelist'].search([('country_group_ids.country_ids.code', '=', p.country_id.code)], limit=1)
pl = pls and pls[0].id
if not pl:
# search pl where no country
pls = self.env['product.pricelist'].search([('country_group_ids', '=', False)], limit=1)
pl = pls and pls[0].id
if not pl:
prop = Property.get('property_product_pricelist', 'res.partner')
pl = prop and prop[0].id
if not pl:
pls = self.env['product.pricelist'].search([], limit=1)
pl = pls and pls[0].id
return pl
class ResCountryGroup(models.Model):
_inherit = 'res.country.group'
pricelist_ids = fields.Many2many('product.pricelist', 'res_country_group_pricelist_rel',
'res_country_group_id', 'pricelist_id', string='Pricelists')
class PricelistItem(models.Model):
_name = "product.pricelist.item"
_description = "Pricelist item"
_order = "applied_on, min_quantity desc, categ_id desc"
product_tmpl_id = fields.Many2one(
'product.template', 'Product Template', ondelete='cascade',
help="Specify a template if this rule only applies to one product template. Keep empty otherwise.")
product_id = fields.Many2one(
'product.product', 'Product', ondelete='cascade',
help="Specify a product if this rule only applies to one product. Keep empty otherwise.")
categ_id = fields.Many2one(
'product.category', 'Product Category', ondelete='cascade',
help="Specify a product category if this rule only applies to products belonging to this category or its children categories. Keep empty otherwise.")
min_quantity = fields.Integer(
'Min. Quantity', default=1,
help="For the rule to apply, bought/sold quantity must be greater "
"than or equal to the minimum quantity specified in this field.\n"
"Expressed in the default unit of measure of the product.")
applied_on = fields.Selection([
('3_global', 'Global'),
('2_product_category', ' Product Category'),
('1_product', 'Product'),
('0_product_variant', 'Product Variant')], "Apply On",
default='3_global', required=True,
help='Pricelist Item applicable on selected option')
sequence = fields.Integer(
'Sequence', default=5, required=True,
help="Gives the order in which the pricelist items will be checked. The evaluation gives highest priority to lowest sequence and stops as soon as a matching item is found.")
base = fields.Selection([
('list_price', 'Public Price'),
('standard_price', 'Cost'),
('pricelist', 'Other Pricelist')], "Based on",
default='list_price', required=True,
help='Base price for computation.\n'
'Public Price: The base price will be the Sale/public Price.\n'
'Cost Price : The base price will be the cost price.\n'
'Other Pricelist : Computation of the base price based on another Pricelist.')
base_pricelist_id = fields.Many2one('product.pricelist', 'Other Pricelist')
pricelist_id = fields.Many2one('product.pricelist', 'Pricelist', index=True, ondelete='cascade')
price_surcharge = fields.Float(
'Price Surcharge', digits=dp.get_precision('Product Price'),
help='Specify the fixed amount to add or substract(if negative) to the amount calculated with the discount.')
price_discount = fields.Float('Price Discount', default=0, digits=(16, 2))
price_round = fields.Float(
'Price Rounding', digits=dp.get_precision('Product Price'),
help="Sets the price so that it is a multiple of this value.\n"
"Rounding is applied after the discount and before the surcharge.\n"
"To have prices that end in 9.99, set rounding 10, surcharge -0.01")
price_min_margin = fields.Float(
'Min. Price Margin', digits=dp.get_precision('Product Price'),
help='Specify the minimum amount of margin over the base price.')
price_max_margin = fields.Float(
'Max. Price Margin', digits=dp.get_precision('Product Price'),
help='Specify the maximum amount of margin over the base price.')
company_id = fields.Many2one(
'res.company', 'Company',
readonly=True, related='pricelist_id.company_id', store=True)
currency_id = fields.Many2one(
'res.currency', 'Currency',
readonly=True, related='pricelist_id.currency_id', store=True)
date_start = fields.Date('Start Date', help="Starting date for the pricelist item validation")
date_end = fields.Date('End Date', help="Ending valid for the pricelist item validation")
compute_price = fields.Selection([
('fixed', 'Fix Price'),
('percentage', 'Percentage (discount)'),
('formula', 'Formula')], index=True, default='fixed')
fixed_price = fields.Float('Fixed Price', digits=dp.get_precision('Product Price'))
percent_price = fields.Float('Percentage Price')
# functional fields used for usability purposes
name = fields.Char(
'Name', compute='_get_pricelist_item_name_price',
help="Explicit rule name for this pricelist line.")
price = fields.Char(
'Price', compute='_get_pricelist_item_name_price',
help="Explicit rule name for this pricelist line.")
@api.constrains('base_pricelist_id', 'pricelist_id', 'base')
def _check_recursion(self):
if any(item.base == 'pricelist' and item.pricelist_id and item.pricelist_id == item.base_pricelist_id for item in self):
raise ValidationError(_('Error! You cannot assign the Main Pricelist as Other Pricelist in PriceList Item!'))
return True
@api.constrains('price_min_margin', 'price_max_margin')
def _check_margin(self):
if any(item.price_min_margin > item.price_max_margin for item in self):
raise ValidationError(_('Error! The minimum margin should be lower than the maximum margin.'))
return True
@api.one
@api.depends('categ_id', 'product_tmpl_id', 'product_id', 'compute_price', 'fixed_price', \
'pricelist_id', 'percent_price', 'price_discount', 'price_surcharge')
def _get_pricelist_item_name_price(self):
if self.categ_id:
self.name = _("Category: %s") % (self.categ_id.name)
elif self.product_tmpl_id:
self.name = self.product_tmpl_id.name
elif self.product_id:
self.name = self.product_id.display_name.replace('[%s]' % self.product_id.code, '')
else:
self.name = _("All Products")
if self.compute_price == 'fixed':
self.price = ("%s %s") % (self.fixed_price, self.pricelist_id.currency_id.name)
elif self.compute_price == 'percentage':
self.price = _("%s %% discount") % (self.percent_price)
else:
self.price = _("%s %% discount and %s surcharge") % (abs(self.price_discount), self.price_surcharge)
@api.onchange('applied_on')
def _onchange_applied_on(self):
if self.applied_on != '0_product_variant':
self.product_id = False
if self.applied_on != '1_product':
self.product_tmpl_id = False
if self.applied_on != '2_product_category':
self.categ_id = False
@api.onchange('compute_price')
def _onchange_compute_price(self):
if self.compute_price != 'fixed':
self.fixed_price = 0.0
if self.compute_price != 'percentage':
self.percent_price = 0.0
if self.compute_price != 'formula':
self.update({
'price_discount': 0.0,
'price_surcharge': 0.0,
'price_round': 0.0,
'price_min_margin': 0.0,
'price_max_margin': 0.0,
})
| agpl-3.0 | 8,119,668,476,666,350,000 | 48.802998 | 181 | 0.581735 | false | 4.091837 | false | false | false |
Rubisk/mcedit2 | src/mcedit2/rendering/rendergraph.py | 1 | 11465 | """
${NAME}
"""
from __future__ import absolute_import, division, print_function
import collections
import logging
import weakref
from OpenGL import GL
import numpy
from mcedit2.rendering import cubes
from mcedit2.rendering.depths import DepthOffset
from mcedit2.util import profiler
from mcedit2.util.glutils import DisplayList, gl
log = logging.getLogger(__name__)
class RenderNode(object):
    """One node of the render tree, mirroring a single scenegraph node.

    Each RenderNode owns a GL display list containing its own drawing plus
    calls to its children's display lists. Lists are recompiled lazily:
    whenever a node is added, removed, or invalidated, `childNeedsRecompile`
    is set on it and on every ancestor (via touch()), and the next compile()
    pass descends only into flagged branches.
    """

    def __init__(self, sceneNode):
        # sceneNode: the scenegraph node this render node draws.
        super(RenderNode, self).__init__()
        self.children = []  # child RenderNodes, in draw order
        self.childrenBySceneNode = {}  # scene node -> child RenderNode
        self.sceneNode = sceneNode
        self.displayList = DisplayList()  # Recompiled whenever this node's scenegraph node is dirty
                                          # or this node gains or loses children
        self.childNeedsRecompile = True

    def __repr__(self):
        return "%s(%s)" % (self.__class__.__name__, self.sceneNode)

    # The parent link is stored as a weakref (see setter) to avoid
    # parent<->child reference cycles keeping subtrees alive.
    _parent = None

    @property
    def parent(self):
        """The parent RenderNode, or None if detached (or already collected)."""
        if self._parent:
            return self._parent()

    @parent.setter
    def parent(self, value):
        if value is not None:
            self._parent = weakref.ref(value)
        else:
            self._parent = None

    def addChild(self, node):
        """Append `node` as the last child and flag ancestors for recompile."""
        self.children.append(node)
        self._addChild(node)

    def _addChild(self, node):
        # Shared bookkeeping for addChild/insertNode: index the child by its
        # scene node, reparent it, and invalidate this node's display list so
        # the new child's list is included on the next compile.
        self.childrenBySceneNode[node.sceneNode] = node
        node.parent = self
        self.displayList.invalidate()
        self.childNeedsRecompile = True
        if self.parent:
            self.parent.touch()

    def insertNode(self, index, node):
        """Insert `node` at position `index` among the children."""
        self.children.insert(index, node)
        self._addChild(node)

    def removeChild(self, node):
        """Detach `node`; raises ValueError if it is not a current child."""
        self.childrenBySceneNode.pop(node.sceneNode, None)
        self.children.remove(node)
        self.displayList.invalidate()
        node.parent = None
        self.childNeedsRecompile = True
        if self.parent:
            self.parent.touch()

    def invalidate(self):
        """Force this node's display list to be rebuilt on the next compile."""
        self.displayList.invalidate()
        self.touch()

    def touch(self):
        # Flag this node and every ancestor so the next compile() pass
        # descends into this branch again.
        node = self
        while node:
            node.childNeedsRecompile = True
            node = node.parent

    def getList(self):
        """Return the GL display list name(s) for this node."""
        return self.displayList.getList()

    def callList(self):
        """Execute this node's display list."""
        self.displayList.call()

    def compile(self):
        """Recompile dirty, visible children, then (re)compile this node's list.

        DisplayList.compile is presumably a no-op unless the list was
        invalidated -- TODO confirm against DisplayList.
        """
        if self.childNeedsRecompile:
            for node in self.children:
                if node.sceneNode.visible:
                    node.compile()

            self.childNeedsRecompile = False

        self.displayList.compile(self.draw)

    def draw(self):
        # Body recorded into the display list by compile().
        self.drawSelf()
        self.drawChildren()

    def drawChildren(self):
        # Batch all visible children's lists into a single glCallLists call.
        if len(self.children):
            lists = [node.getList()
                     for node in self.children
                     if node.sceneNode.visible]
            if len(lists):
                lists = numpy.hstack(tuple(lists))
                try:
                    GL.glCallLists(lists)
                except GL.error as e:
                    log.exception("Error calling child lists: %s", e)
                    raise

    def drawSelf(self):
        """Draw this node's own geometry. Default does nothing; subclasses override."""
        pass

    def destroy(self):
        """Recursively release the display lists of this entire subtree."""
        for child in self.children:
            child.destroy()
        self.displayList.destroy()
class RenderstateRenderNode(RenderNode):
def draw(self):
self.enter()
self.drawChildren()
self.exit()
def enter(self):
raise NotImplementedError
def exit(self):
raise NotImplementedError
class TextureAtlasRenderNode(RenderstateRenderNode):
def __init__(self, sceneNode):
super(TextureAtlasRenderNode, self).__init__(sceneNode)
self.sceneNode = sceneNode
def enter(self):
if self.sceneNode.textureAtlas is None:
return
GL.glColor(1., 1., 1., 1.)
textureAtlas = self.sceneNode.textureAtlas
GL.glActiveTexture(GL.GL_TEXTURE0)
GL.glEnable(GL.GL_TEXTURE_2D)
textureAtlas.bindTerrain()
GL.glMatrixMode(GL.GL_TEXTURE)
GL.glPushMatrix()
GL.glLoadIdentity()
GL.glScale(1. / textureAtlas.width, 1. / textureAtlas.height, 1.)
GL.glActiveTexture(GL.GL_TEXTURE1)
GL.glEnable(GL.GL_TEXTURE_2D)
textureAtlas.bindLight()
GL.glMatrixMode(GL.GL_TEXTURE)
GL.glPushMatrix()
GL.glLoadIdentity()
GL.glScale(1. / 16, 1. / 16, 1.)
GL.glActiveTexture(GL.GL_TEXTURE0)
GL.glEnable(GL.GL_CULL_FACE)
def exit(self):
if self.sceneNode.textureAtlas is None:
return
GL.glDisable(GL.GL_CULL_FACE)
GL.glActiveTexture(GL.GL_TEXTURE1)
GL.glBindTexture(GL.GL_TEXTURE_2D, 0)
GL.glDisable(GL.GL_TEXTURE_2D)
GL.glMatrixMode(GL.GL_TEXTURE)
GL.glPopMatrix()
GL.glActiveTexture(GL.GL_TEXTURE0)
GL.glDisable(GL.GL_TEXTURE_2D)
GL.glMatrixMode(GL.GL_TEXTURE)
GL.glPopMatrix()
class TranslateRenderNode(RenderstateRenderNode):
def __init__(self, sceneNode):
"""
:type sceneNode: TranslateNode
"""
super(TranslateRenderNode, self).__init__(sceneNode)
def __repr__(self):
return "TranslateRenderNode(%s)" % (self.sceneNode.translateOffset,)
def enter(self):
GL.glMatrixMode(GL.GL_MODELVIEW)
GL.glPushMatrix()
GL.glTranslate(*self.sceneNode.translateOffset)
def exit(self):
GL.glMatrixMode(GL.GL_MODELVIEW)
GL.glPopMatrix()
class VertexRenderNode(RenderNode):
def __init__(self, sceneNode):
"""
:type sceneNode: VertexNode
"""
super(VertexRenderNode, self).__init__(sceneNode)
self.didDraw = False
def invalidate(self):
if self.didDraw:
assert False
super(VertexRenderNode, self).invalidate()
def drawSelf(self):
self.didDraw = True
bare = []
withTex = []
withLights = []
for array in self.sceneNode.vertexArrays:
if array.lights:
withLights.append(array)
elif array.textures:
withTex.append(array)
else:
bare.append(array)
with gl.glPushAttrib(GL.GL_ENABLE_BIT):
GL.glDisable(GL.GL_TEXTURE_2D)
self.drawArrays(bare, False, False)
GL.glEnable(GL.GL_TEXTURE_2D)
self.drawArrays(withTex, True, False)
self.drawArrays(withLights, True, True)
def drawArrays(self, vertexArrays, textures, lights):
if textures:
GL.glClientActiveTexture(GL.GL_TEXTURE0)
GL.glEnableClientState(GL.GL_TEXTURE_COORD_ARRAY)
if lights:
GL.glClientActiveTexture(GL.GL_TEXTURE1)
GL.glEnableClientState(GL.GL_TEXTURE_COORD_ARRAY)
else:
GL.glMultiTexCoord2d(GL.GL_TEXTURE1, 15, 15)
GL.glEnableClientState(GL.GL_COLOR_ARRAY)
for array in vertexArrays:
if 0 == len(array.buffer):
continue
stride = 4 * array.elements
buf = array.buffer.ravel()
GL.glVertexPointer(3, GL.GL_FLOAT, stride, buf)
if textures:
GL.glClientActiveTexture(GL.GL_TEXTURE0)
GL.glTexCoordPointer(2, GL.GL_FLOAT, stride, (buf[array.texOffset:]))
if lights:
GL.glClientActiveTexture(GL.GL_TEXTURE1)
GL.glTexCoordPointer(2, GL.GL_FLOAT, stride, (buf[array.lightOffset:]))
GL.glColorPointer(4, GL.GL_UNSIGNED_BYTE, stride, (buf.view(dtype=numpy.uint8)[array.rgbaOffset*4:]))
vertexCount = int(array.buffer.size / array.elements)
GL.glDrawArrays(array.gl_type, 0, vertexCount)
GL.glDisableClientState(GL.GL_COLOR_ARRAY)
if lights:
GL.glDisableClientState(GL.GL_TEXTURE_COORD_ARRAY)
if textures:
GL.glClientActiveTexture(GL.GL_TEXTURE0)
GL.glDisableClientState(GL.GL_TEXTURE_COORD_ARRAY)
class OrthoRenderNode(RenderstateRenderNode):
def enter(self):
w, h = self.sceneNode.size
GL.glMatrixMode(GL.GL_PROJECTION)
GL.glPushMatrix()
GL.glLoadIdentity()
GL.glOrtho(0., w, 0., h, -200, 200)
def exit(self):
GL.glMatrixMode(GL.GL_PROJECTION)
GL.glPopMatrix()
class ClearRenderNode(RenderNode):
def drawSelf(self):
color = self.sceneNode.clearColor
if color is None:
GL.glClear(GL.GL_DEPTH_BUFFER_BIT)
else:
GL.glClearColor(*color)
GL.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT)
class DepthMaskRenderNode(RenderstateRenderNode):
def enter(self):
GL.glPushAttrib(GL.GL_DEPTH_BUFFER_BIT)
GL.glDepthMask(self.sceneNode.mask)
def exit(self):
GL.glPopAttrib()
class BoxRenderNode(RenderNode):
def drawSelf(self):
box = self.sceneNode.box
color = self.sceneNode.color
GL.glPolygonOffset(DepthOffset.Selection, DepthOffset.Selection)
cubes.drawConstructionCube(box, color)
class BoxFaceRenderNode(RenderNode):
def drawBoxFace(self, box, face, color=(0.9, 0.6, 0.2, 0.5)):
GL.glEnable(GL.GL_BLEND)
GL.glColor(*color)
cubes.drawFace(box, face)
GL.glColor(0.9, 0.6, 0.2, 0.8)
GL.glLineWidth(2.0)
cubes.drawFace(box, face, elementType=GL.GL_LINE_STRIP)
GL.glDisable(GL.GL_BLEND)
class DepthOffsetRenderNode(RenderstateRenderNode):
def enter(self):
GL.glPushAttrib(GL.GL_POLYGON_BIT)
GL.glPolygonOffset(self.sceneNode.depthOffset, self.sceneNode.depthOffset)
GL.glEnable(GL.GL_POLYGON_OFFSET_FILL)
def exit(self):
GL.glPopAttrib()
def updateRenderNode(renderNode):
"""
:type renderNode: mcedit2.rendering.rendergraph.RenderNode
"""
sceneNode = renderNode.sceneNode
if sceneNode.dirty:
renderNode.invalidate()
sceneNode.dirty = False
if sceneNode.descendentChildrenChanged or sceneNode.childrenChanged:
updateChildren(renderNode)
sceneNode.descendentChildrenChanged = False
sceneNode.childrenChanged = False
def createRenderNode(sceneNode):
"""
:type sceneNode: Node
:rtype: mcedit2.rendering.rendergraph.RenderNode
"""
renderNode = sceneNode.RenderNodeClass(sceneNode)
updateChildren(renderNode)
return renderNode
def updateChildren(renderNode):
"""
:type renderNode: mcedit2.rendering.rendergraph.RenderNode
:return:
:rtype:
"""
sceneNode = renderNode.sceneNode
deadChildren = []
for renderChild in renderNode.children:
if renderChild.sceneNode.parent is None:
deadChildren.append(renderChild)
for dc in deadChildren:
renderNode.removeChild(dc)
dc.destroy()
for index, sceneChild in enumerate(sceneNode.children):
renderChild = renderNode.childrenBySceneNode.get(sceneChild)
if renderChild is None:
renderNode.insertNode(index, createRenderNode(sceneChild))
sceneChild.dirty = False
else:
updateRenderNode(renderChild)
def renderScene(renderNode):
with profiler.context("updateRenderNode"):
updateRenderNode(renderNode)
with profiler.context("renderNode.compile"):
renderNode.compile()
with profiler.context("renderNode.callList"):
renderNode.callList()
| bsd-3-clause | -7,526,664,301,518,456,000 | 27.95202 | 113 | 0.614653 | false | 3.78258 | false | false | false |
google/jws | jws/ecdsa_verify.py | 1 | 2742 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An implementation of PublicKeyVerify for ECDSA."""
__author__ = "quannguyen@google.com (Quan Nguyen)"
from cryptography import exceptions
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
import six
from .public_key_verify import PublicKeyVerify
class EcdsaVerify(PublicKeyVerify):
"""ECDSA verifying with cryptography.io."""
def __init__(self, pub_key, algorithm):
"""Constructor for EcdsaVerify.
Args:
pub_key: ec.EllipticCurvePublicKey, the Ecdsa public key.
algorithm: string, Ecdsa algorithm as defined at
https://tools.ietf.org/html/rfc7518#section-3.1.
Raises:
TypeError: if the public key is not an instance of
ec.EllipticCurvePublicKey.
UnsupportedAlgorithm: if the algorithm is not supported.
"""
if not isinstance(pub_key, ec.EllipticCurvePublicKey):
raise TypeError(
"The public key must be an instance of ec.EllipticCurvePublicKey")
self.pub_key = pub_key
curve_name = ""
if algorithm == "ES256":
self.hash = hashes.SHA256()
curve_name = "secp256r1"
elif algorithm == "ES384":
self.hash = hashes.SHA384()
curve_name = "secp384r1"
elif algorithm == "ES512":
self.hash = hashes.SHA512()
curve_name = "secp521r1"
else:
raise exceptions.UnsupportedAlgorithm(
"Unknown algorithm : %s" % (algorithm))
# In Ecdsa, both the key and the algorithm define the curve. Therefore, we
# must cross check them to make sure they're the same.
if curve_name != pub_key.curve.name:
raise exceptions.UnsupportedAlgorithm(
"The curve in public key %s and in algorithm % don't match" %
(pub_key.curve.name, curve_name))
self.algorithm = algorithm
def verify(self, signature, data):
"""See base class."""
if not isinstance(signature, six.binary_type) or not isinstance(
data, six.binary_type):
raise SecurityException("Signature and data must be bytes")
try:
self.pub_key.verify(signature, data, ec.ECDSA(self.hash))
except:
raise SecurityException("Invalid signature")
| apache-2.0 | 4,097,409,235,211,034,600 | 36.054054 | 78 | 0.698031 | false | 3.911555 | false | false | false |
zhumengyuan/kallithea | kallithea/bin/kallithea_config.py | 2 | 5327 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
kallithea.bin.kallithea_config
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
configuration generator for Kallithea
This file was forked by the Kallithea project in July 2014.
Original author and date, and relevant copyright and licensing information is below:
:created_on: Jun 18, 2013
:author: marcink
:copyright: (c) 2013 RhodeCode GmbH, and others.
:license: GPLv3, see LICENSE.md for more details.
"""
from __future__ import with_statement
import os
import sys
import uuid
import argparse
from mako.template import Template
TMPL = 'template.ini.mako'  # name of the builtin template shipped next to this module
here = os.path.dirname(os.path.abspath(__file__))  # directory containing this module
def argparser(argv):
usage = (
"kallithea-config [-h] [--filename=FILENAME] [--template=TEMPLATE] \n"
"VARS optional specify extra template variable that will be available in "
"template. Use comma separated key=val format eg.\n"
"key1=val1,port=5000,host=127.0.0.1,elements='a\,b\,c'\n"
)
parser = argparse.ArgumentParser(
description='Kallithea CONFIG generator with variable replacement',
usage=usage
)
## config
group = parser.add_argument_group('CONFIG')
group.add_argument('--filename', help='Output ini filename.')
group.add_argument('--template', help='Mako template file to use instead of '
'the default builtin template')
group.add_argument('--raw', help='Store given mako template as raw without '
'parsing. Use this to create custom template '
'initially', action='store_true')
group.add_argument('--show-defaults', help='Show all default variables for '
'builtin template', action='store_true')
args, other = parser.parse_known_args()
return parser, args, other
def _escape_split(text, sep):
"""
Allows for escaping of the separator: e.g. arg='foo\, bar'
It should be noted that the way bash et. al. do command line parsing, those
single quotes are required. a shameless ripoff from fabric project.
"""
escaped_sep = r'\%s' % sep
if escaped_sep not in text:
return text.split(sep)
before, _, after = text.partition(escaped_sep)
startlist = before.split(sep) # a regular split is fine here
unfinished = startlist[-1]
startlist = startlist[:-1]
# recurse because there may be more escaped separators
endlist = _escape_split(after, sep)
# finish building the escaped value. we use endlist[0] becaue the first
# part of the string sent in recursion is the rest of the escaped value.
unfinished += sep + endlist[0]
return startlist + [unfinished] + endlist[1:] # put together all the parts
def _run(argv):
parser, args, other = argparser(argv)
if not len(sys.argv) > 1:
print parser.print_help()
sys.exit(0)
# defaults that can be overwritten by arguments
tmpl_stored_args = {
'http_server': 'waitress',
'lang': 'en',
'database_engine': 'sqlite',
'host': '127.0.0.1',
'port': 5000,
'error_aggregation_service': None,
}
if other:
# parse arguments, we assume only first is correct
kwargs = {}
for el in _escape_split(other[0], ','):
kv = _escape_split(el, '=')
if len(kv) == 2:
k, v = kv
kwargs[k] = v
# update our template stored args
tmpl_stored_args.update(kwargs)
# use default that cannot be replaced
tmpl_stored_args.update({
'uuid': lambda: uuid.uuid4().hex,
'here': os.path.abspath(os.curdir),
})
if args.show_defaults:
for k,v in tmpl_stored_args.iteritems():
print '%s=%s' % (k, v)
sys.exit(0)
try:
# built in template
tmpl_file = os.path.join(here, TMPL)
if args.template:
tmpl_file = args.template
with open(tmpl_file, 'rb') as f:
tmpl_data = f.read()
if args.raw:
tmpl = tmpl_data
else:
tmpl = Template(tmpl_data).render(**tmpl_stored_args)
with open(args.filename, 'wb') as f:
f.write(tmpl)
print 'Wrote new config file in %s' % (os.path.abspath(args.filename))
except Exception:
from mako import exceptions
print exceptions.text_error_template().render()
def main(argv=None):
"""
Main execution function for cli
:param argv:
"""
if argv is None:
argv = sys.argv
return _run(argv)
if __name__ == '__main__':  # allow running this module directly as a script
    sys.exit(main(sys.argv))
| gpl-3.0 | 5,297,344,204,185,463,000 | 32.086957 | 87 | 0.617796 | false | 3.860145 | true | false | false |
gingerpayments/python-libmt94x | libmt94x/info_acct_owner_subfields.py | 1 | 5732 | from libmt94x.remittance_info import AbstractRemittanceInfo
from libmt94x.transfer_failed_codes import TransferFailed
class InfoToAcccountOwnerSubField(object):
'''Abstract base class for all subfields of InformationToAcccountOwner'''
pass
class BeneficiaryParty(InfoToAcccountOwnerSubField):
tag = 'BENM'
def __init__(self, account_number=None, bic=None, name=None, city=None):
self.account_number = account_number
self.bic = bic
self.name = name
self.city = city
class BusinessPurpose(InfoToAcccountOwnerSubField):
tag = 'BUSP'
def __init__(self, id_code=None, sepa_transaction_type=None):
self.id_code = id_code
self.sepa_transaction_type = sepa_transaction_type
class Charges(InfoToAcccountOwnerSubField):
tag = 'CHGS'
def __init__(self, charges):
self.charges = charges
class ClientReference(InfoToAcccountOwnerSubField):
tag = 'CREF'
def __init__(self, client_reference):
self.client_reference = client_reference
class CounterPartyID(InfoToAcccountOwnerSubField):
'''NL term: Tegenpartij ID'''
tag = 'CNTP'
def __init__(self, account_number=None, bic=None, name=None, city=None):
self.account_number = account_number
self.bic = bic
self.name = name
self.city = city
class CounterPartyIdentification(InfoToAcccountOwnerSubField):
tag = 'ID'
def __init__(self, id_code):
self.id_code = id_code
class CreditorID(InfoToAcccountOwnerSubField):
'''NL term: Incassant ID'''
tag = 'CSID'
def __init__(self, creditor_id):
self.creditor_id = creditor_id
class EndToEndReference(InfoToAcccountOwnerSubField):
'''NL term: Uniek kenmerk'''
tag = 'EREF'
def __init__(self, end_to_end_reference):
self.end_to_end_reference = end_to_end_reference
class ExchangeRate(InfoToAcccountOwnerSubField):
tag = 'EXCH'
def __init__(self, exchange_rate):
self.exchange_rate = exchange_rate
class InstructionID(InfoToAcccountOwnerSubField):
tag = 'IREF'
def __init__(self, instruction_id):
self.instruction_id = instruction_id
class MandateReference(InfoToAcccountOwnerSubField):
'''NL term: Machtigingskenmerk'''
tag = 'MARF'
def __init__(self, mandate_reference):
self.mandate_reference = mandate_reference
class OrderingParty(InfoToAcccountOwnerSubField):
tag = 'ORDP'
def __init__(self, account_number=None, bic=None, name=None, city=None):
self.account_number = account_number
self.bic = bic
self.name = name
self.city = city
class PaymentInformationID(InfoToAcccountOwnerSubField):
'''NL term: Batch ID'''
tag = 'PREF'
def __init__(self, payment_information_id):
self.payment_information_id = payment_information_id
class PurposeCode(InfoToAcccountOwnerSubField):
'''NL term: Speciale verwerkingscode'''
tag = 'PURP'
def __init__(self, purpose_of_collection):
self.purpose_of_collection = purpose_of_collection
class RemittanceInformation(InfoToAcccountOwnerSubField):
'''NL term: Omschrijvingsregels'''
tag = 'REMI'
def __init__(self, remittance_info, code=None, issuer=None):
if not isinstance(remittance_info, AbstractRemittanceInfo):
raise ValueError(
"Value for `remittance_info` must be instance of AbstractRemittanceInfo")
self.remittance_info = remittance_info
# TODO: Are these two even used??? They are in the spec but do not
# appear in examples
self.code = code
self.issuer = issuer
class ReturnReason(InfoToAcccountOwnerSubField):
'''NL term: Uitval reden'''
tag = 'RTRN'
def __init__(self, reason_code):
'''NOTE: The ING IBP spec also mentions a legacy R-Type integer
parameter which has the following possible values:
1 - Reject (geweigerde)
2 - Return (retourbetaling)
3 - Refund (terugbetaling)
4 - Reversal (herroeping)
5 - Cancellation (annulering)
The R-Type is concatenated to the `reason_code`. We do not implement the R-Type,
we just mention it here for reference.'''
transfer_failed = TransferFailed.get_instance()
if not transfer_failed.code_is_valid(reason_code):
raise ValueError("Value `reason_code` is invalid: %s" % reason_code)
self.reason_code = reason_code
class UltimateBeneficiary(InfoToAcccountOwnerSubField):
tag = 'ULTB'
def __init__(self, name):
self.name = name
class UltimateCreditor(InfoToAcccountOwnerSubField):
'''NL term: Uiteindelijke incassant'''
tag = 'ULTC'
def __init__(self, name=None, id=None):
self.name = name
self.id = id
class UltimateDebtor(InfoToAcccountOwnerSubField):
'''NL term: Uiteindelijke geincasseerde'''
tag = 'ULTD'
def __init__(self, name=None, id=None):
self.name = name
self.id = id
class InfoToAcccountOwnerSubFieldOrder(object):
# This is the order in which the fields must be written
fields = (
ReturnReason,
BusinessPurpose,
ClientReference,
EndToEndReference,
PaymentInformationID,
InstructionID,
MandateReference,
CreditorID,
CounterPartyID,
BeneficiaryParty,
OrderingParty,
RemittanceInformation,
CounterPartyIdentification,
PurposeCode,
UltimateBeneficiary,
UltimateCreditor,
UltimateDebtor,
ExchangeRate,
Charges,
)
@classmethod
def get_field_classes(cls):
return cls.fields
| mit | -7,662,192,650,122,778,000 | 24.475556 | 89 | 0.654047 | false | 3.544836 | false | false | false |
altugkarakurt/morty | morty/evaluator.py | 1 | 3243 | # -*- coding: utf-8 -*-
from converter import Converter
class Evaluator(object):
"""----------------------------------------------------------------
This class is used for evaluating the validity of our estimations.
We return a dictionary entry as our evaluation result. See the
return statements in each function to see which attributes are
being reported.
----------------------------------------------------------------"""
def __init__(self, tonic_tolerance=20):
self.tonic_tolerance = tonic_tolerance
self.CENT_PER_OCTAVE = 1200
# '+' symbol corresponds to quarter tone higher
self.INTERVAL_SYMBOLS = [
('P1', 0, 25), ('P1+', 25, 75), ('m2', 75, 125), ('m2+', 125, 175),
('M2', 175, 225), ('M2+', 225, 275), ('m3', 275, 325),
('m3+', 325, 375), ('M3', 375, 425), ('M3+', 425, 475),
('P4', 475, 525), ('P4+', 525, 575), ('d5', 575, 625),
('d5+', 625, 675), ('P5', 675, 725), ('P5+', 725, 775),
('m6', 775, 825), ('m6+', 825, 875), ('M6', 875, 925),
('M6+', 925, 975), ('m7', 975, 1025), ('m7+', 1025, 1075),
('M7', 1075, 1125), ('M7+', 1125, 1175), ('P1', 1175, 1200)]
@staticmethod
def evaluate_mode(estimated, annotated, source=None):
mode_bool = annotated == estimated
return {'source': source, 'mode_eval': mode_bool,
'annotated_mode': annotated, 'estimated_mode': estimated}
def evaluate_tonic(self, estimated, annotated, source=None):
est_cent = Converter.hz_to_cent(estimated, annotated)
# octave wrapping
cent_diff = est_cent % self.CENT_PER_OCTAVE
# check if the tonic is found correct
bool_tonic = (min([cent_diff, self.CENT_PER_OCTAVE - cent_diff]) <
self.tonic_tolerance)
# convert the cent difference to symbolic interval (P5, m3 etc.)
interval = None
for i in self.INTERVAL_SYMBOLS:
if i[1] <= cent_diff < i[2]:
interval = i[0]
break
elif cent_diff == 1200:
interval = 'P1'
break
# if they are in the same octave the the estimated and octave-wrapped
# values should be the same (very close)
same_octave = (est_cent - cent_diff < 0.001)
return {'mbid': source, 'tonic_eval': bool_tonic,
'same_octave': same_octave, 'cent_diff': cent_diff,
'interval': interval, 'annotated_tonic': annotated,
'estimated_tonic': estimated}
def evaluate_joint(self, tonic_info, mode_info, source=None):
tonic_eval = self.evaluate_tonic(tonic_info[0], tonic_info[1], source)
mode_eval = self.evaluate_mode(mode_info[0], mode_info[1], source)
# merge the two evaluations
joint_eval = tonic_eval.copy()
joint_eval['mode_eval'] = mode_eval['mode_eval']
joint_eval['annotated_mode'] = mode_eval['annotated_mode']
joint_eval['estimated_mode'] = mode_eval['estimated_mode']
joint_eval['joint_eval'] = (joint_eval['tonic_eval'] and
joint_eval['mode_eval'])
return joint_eval
| agpl-3.0 | -4,064,247,050,701,572,000 | 42.24 | 79 | 0.53284 | false | 3.540393 | false | false | false |
jtauber/cleese | necco/kernel/monkey.py | 1 | 6153 | ################################################################################
import pyvga
import blit
import buf
ss = buf.sym('sokoscreen')  # splash-screen pixel data linked into the kernel image
pyvga.exittext()  # leave VGA text mode, enter framebuffer graphics
pyvga.framebuffer[:len(ss)] = ss  # blit the splash screen into video memory
################################################################################
import py8042
import keyb
import pybeep
# I think hz = 1193182 / qerf
qerf = [5424, 5424, 0, 0,
5424, 5424, 0, 0,
4058, 4058, 0, 0,
5424, 5424, 0, 0,
3616, 3616, 0, 0,
5424, 5424, 0, 0,
3224, 3224, 0, 0,
3410, 3410, 0, 0]
def kbd_work():
while 1:
if py8042.more_chars():
ch = keyb.translate_scancode(py8042.get_scancode())
if ch:
stack.swap(ch)
else:
stack.swap(None, idl_task)
dir = None
def clk_work():
while 1:
global dir
blit.fill(pyvga.framebuffer, 320, \
312, 0, 8, 8, (isr.ticker & 15) + 16)
pybeep.on(qerf[isr.ticker & 31])
if py8042.more_squeaks():
dx = dy = 0
while py8042.more_squeaks():
_,dx,dy = py8042.get_squeak()
if dx > 10: dir = 'l'
elif dy > 10: dir = 'k'
elif dx < -10: dir = 'h'
elif dy < -10: dir = 'j'
stack.swap(None, idl_task)
elif dir:
ch = dir; dir = None
stack.swap(ch)
else:
stack.swap(None, idl_task)
interrupts = []
def idl_work():
while 1:
if len(interrupts):
stack.swap(None, interrupts.pop(0))
################################################################################
import stack
import isr
kbd_task = buf.bss(0x400); stack.init(kbd_task, kbd_work)
clk_task = buf.bss(0x400); stack.init(clk_task, clk_work)
idl_task = buf.bss(0x400); stack.init(idl_task, idl_work)
def kbd_isr():
interrupts.append(kbd_task)
def clk_isr():
interrupts.append(clk_task)
################################################################################
#--test map--
map = list(' ##### # # # # ### ## # # ### # ## # ###### # # ## ##### ..# # .$ ..# ##### ### #@## ..# # ######### #######')
#--easier level--
#map = list(' ##### # # #$ # ### $## # $ $ # ### # ## # ###### # # ## ##### ..# # $ $ ..# ##### ### #@## ..# # ######### #######')
#--harder level--
#map = list(' ####### # ...# ##### ...# # . .# # ## ...# ## ## ...# ### ######## # $$$ ## ##### $ $ ##### ## #$ $ # # #@ $ $ $ $ # ###### $$ $ ##### # # ########')
tile_ndx = '@&$*#. '
tiles = [buf.sym('sokotile'), buf.sym('sokogoal'),
buf.sym('stonetile'), buf.sym('stonegoal'),
buf.sym('wall'), buf.sym('goal'), buf.sym('floor')]
blit.paste(pyvga.framebuffer,320, 267, 68, tiles[0], 8)
blit.paste(pyvga.framebuffer,320, 140, 136, tiles[2], 8)
blit.paste(pyvga.framebuffer,320, 140, 156, tiles[5], 8)
def disptile(off):
blit.paste(pyvga.framebuffer, 320,
(off % 20) << 3, (off / 20) << 3, # x, y
tiles[tile_ndx.find(map[off])], 8)
def dispall():
i = len(map)
eol = 0
while i > 0: # no for yet?
i = i - 1
if eol and map[i] != ' ':
eol = 0
if not eol:
disptile(i)
if (i % 20) == 0: # 'not i % 20' freezes on hardware?
eol = 1
def move(dir):
if map.count('@'): soko = map.index('@')
else: soko = map.index('&')
s = list('~~~')
s[0] = map[soko]
s[1] = map[soko+dir]
s[2] = map[soko+dir+dir]
if s[1] in ' .':
s[0] = leave(s[0])
s[1] = enter(s[1])
elif s[1] in '$*' and s[2] in ' .':
s[0] = leave(s[0])
s[1] = enter(s[1])
s[2] = slide(s[2])
map[soko] = s[0]
map[soko+dir] = s[1]
map[soko+dir+dir] = s[2]
disptile(soko)
disptile(soko+dir)
disptile(soko+dir+dir)
def leave(c):
if c == '@': return ' '
else: return '.'
def enter(c):
if c in ' $': return '@'
else: return '&'
def slide(c):
if c == ' ': return '$'
else: return '*'
dispall()
isr.setvec(clk_isr, kbd_isr)
while 1:
def loop(msg):
pyvga.cleartext()
pyvga.entertext()
while msg.count('\n'):
n = msg.index('\n')
print msg[:n]
msg = msg[n+1:]
while 1:
stack.swap(None, idl_task)
if not map.count('$'):
loop('''
#### ## #### ##
### ## ### #
### # ### #
### # ### # ##
### # ### # ####
## # ### # ##
### # ### #
### # ## # #
## # ### # #
### # ### ## # ###
### # #### ### ### ### ## # ### ### ###
## # ## # ### ### ### ## # ### #### ##
### ## ## ### ### ## ## # ### ### ##
### ## ## ### ### ### #### # ### ### ##
### ### ## ### ### ### #### # ### ### ##
### ### ## ### ### ## #### # ### ### ##
### ### ## ### ### ## ### # ### ### ##
### ### ## ### ### ### ### ### ### ##
### ### ## ### ### ### ### ### ### ##
### ## ## ### ### ### ### ### ### ##
### ## ## ### ### # # ### ### ##
### ## # ## #### # # ### ### ##
##### ##### ##### ## # # ######## ##
''')
bufchar = stack.swap(None, idl_task)
if bufchar == 'q': loop('Thanks for playing')
elif bufchar in 'hs': move(-1)
elif bufchar in 'jx': move(20)
elif bufchar in 'ke': move(-20)
elif bufchar in 'ld': move(1)
elif bufchar == 'p': dispall()
| mit | -4,385,249,557,267,020,000 | 29.919598 | 289 | 0.322932 | false | 2.902358 | false | false | false |
JonasSC/SuMPF | sumpf/_data/_filters/_bands.py | 1 | 8876 | # This file is a part of the "SuMPF" package
# Copyright (C) 2018-2021 Jonas Schulte-Coerne
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""contains the class for :class:`~sumpf.Bands`-filter"""
import collections.abc
import numpy
import sumpf._internal as sumpf_internal
from ._base import Filter
__all__ = ("Bands",)
class Bands(Filter):
"""A filter, that is defined by supporting points and functions for interpolation
and extrapolation. Use cases for this filter include storing the result of an
n-th octave analysis or reading an equalization and applying it to a :class:`~sumpf.Signal`.
"""
interpolations = sumpf_internal.Interpolations #: an enumeration with flags for defining the interpolation and extrapolation functions
file_formats = sumpf_internal.filter_writers.BandsFormats #: an enumeration with file formats, whose flags can be passed to :meth:`~sumpf.Bands.save`
def __init__(self,
bands=({},),
interpolations=sumpf_internal.Interpolations.LOGARITHMIC,
extrapolations=sumpf_internal.Interpolations.STAIRS_LIN,
labels=("Bands",)):
"""
:param bands: a sequence of dictionaries, that map float frequency values
to complex values of the filter function. This can also be
a single dictionary, if the bands filter shall only have one
channel.
:param interpolation: a sequence of flags from the :class:`sumpf.Bands.interpolations`
enumeration, that defines the function, with which
the interpolation between the samples given in the
``bands`` dictionary shall be computed. This can also
be a single flag, if the same interpolation shall
be used for all channels.
:param extrapolation: a sequence of flags from the :class:`sumpf.Bands.interpolations`
enumeration, that defines the function, with which
the extrapolation outside the samples given in the
``bands`` dictionary shall be computed. This can also
be a single flag, if the same extrapolation shall
be used for all channels.
:param labels: a sequence of string labels for the channels.
"""
# make sure, that all data is in sequences with the correct length
if isinstance(bands, collections.abc.Mapping):
bands = (bands,)
if not isinstance(interpolations, collections.abc.Sequence):
interpolations = (interpolations,) * len(bands)
elif len(interpolations) < len(bands):
interpolations = tuple(interpolations) + (interpolations[-1],) * (len(bands) - len(interpolations))
if not isinstance(extrapolations, collections.abc.Sequence):
extrapolations = (extrapolations,) * len(bands)
elif len(extrapolations) < len(bands):
extrapolations = tuple(extrapolations) + (extrapolations[-1],) * (len(bands) - len(extrapolations))
if not isinstance(labels, collections.abc.Sequence):
labels = (labels,) * len(bands)
elif len(labels) < len(bands):
labels = tuple(labels) + (labels[0] if labels else "Bands",) * (len(bands) - len(labels))
# create the transfer functions
tfs = []
for b, i, e in zip(bands, interpolations, extrapolations):
fs = numpy.array(sorted(b.keys()))
tf = Bands.Bands(xs=fs,
ys=numpy.array([b[x] for x in fs]),
interpolation=i,
extrapolation=e)
tfs.append(tf)
# initialize the filter
Filter.__init__(self,
transfer_functions=tfs,
labels=labels)
# store the original data
self.__bands = bands
self.__interpolations = [int(i) for i in interpolations[0:len(tfs)]]
self.__extrapolations = [int(e) for e in extrapolations[0:len(tfs)]]
def __repr__(self):
"""Operator overload for using the built-in function :func:`repr` to generate
a string representation of the bands filter, that can be evaluated with :func:`eval`.
:returns: a potentially very long string
"""
return (f"{self.__class__.__name__}(bands={self.__bands!r}, "
f"interpolations={self.__interpolations}, "
f"extrapolations={self.__extrapolations}, "
f"labels={self.labels()})")
def save(self, path, file_format=file_formats.AUTO):
"""Saves the bands filter to a file. The file will be created if it does not exist.
:param path: the path to the file
:param file_format: an optional flag from the :attr:`sumpf.Bands.file_formats`
enumeration, that specifies the file format, in which
the bands filter shall be stored. If this parameter
is omitted or set to :attr:`~sumpf.Bands.file_formats`.\ ``AUTO``,
the format will be guessed from the ending of the filename.
:returns: self
"""
writer = sumpf_internal.get_writer(file_format=file_format,
writers=sumpf_internal.filter_writers.bands_writers,
writer_base_class=sumpf_internal.filter_writers.Writer)
writer(self, path)
return self
    def to_db(self, reference=1.0, factor=20.0):
        """Computes a bands filter with the values of this filter converted to
        decibels. It will use the same interpolation and extrapolation functions
        as the original filter.
        This method takes the values from the bands filter as they are, which might
        not make sense in case of complex or negative filter values. Consider
        computing the magnitude of the filter by using the :func:`abs` function
        before calling this method.
        :param reference: the value, by which the filter's values are divided before
                          computing the logarithm. Usually, this is one, but for
                          example when converting a filter in Pascal to dB[SPL],
                          the reference must be set to 20e-6.
        :param factor: the factor, with which the logarithm is multiplied. Use
                       20 for root-power quantities (if the bands' values are amplitudes)
                       and 10 for power quantities (if the bands' values are energies
                       or powers).
        :returns: a new bands filter with the converted (logarithmic) values
        """
        # convert each band's values to dB while keeping the frequency grid,
        # the interpolation/extrapolation settings and the labels unchanged
        return Bands(bands=[{f: factor * numpy.log10(y / reference) for f, y in b.items()} for b in self.__bands],
                     interpolations=self.__interpolations,
                     extrapolations=self.__extrapolations,
                     labels=self.labels())
def from_db(self, reference=1.0, factor=20.0):
"""Computes a bands filter with the values of this filter converted from
decibels to a linear representation. It will use the same interpolation
and extrapolation functions as the original filter.
:param reference: the value, by which the filter's values are divided before
computing the logarithm. Usually, this is one, but for
example when converting a filter in dB[SPL] to Pascal
the reference must be set to 20e-6.
:param factor: the factor, with which the logarithm is multiplied. Use
20 for root-power quantities (if the bands' values are amplitudes)
and 10 for power quantities (if the bands' values are energies
or powers).
"""
return Bands(bands=[{f: reference * 10.0 ** (y / factor) for f, y in b.items()} for b in self.__bands],
interpolations=self.__interpolations,
extrapolations=self.__extrapolations,
labels=self.labels())
| lgpl-3.0 | -2,893,990,470,551,838,700 | 53.790123 | 155 | 0.604664 | false | 4.610909 | false | false | false |
pulilab/django-collectform | collectform/views.py | 1 | 1327 | from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from rest_framework.decorators import api_view
from rest_framework.response import Response
from .forms import DistributionRequestForm
from .models import mail_request_to_managers
@api_view(['POST'])
def handle_post(request):
    """Create a DistributionRequest from a POST and attach optional vidzios.

    The authenticated user's name, e-mail and username are merged into the
    submitted data before validation.  On success, the submitted vidzio ids
    (if any) are linked to the new request via generic relations and the
    managers are notified by mail.

    :returns: DRF ``Response`` with ``{'status': 'success'}`` (HTTP 200) or
              ``{'status': 'error', 'errors': ...}`` (HTTP 400)
    """
    response = {'status': 'error'}
    status_code = 400
    data = request.DATA
    data.update({
        'name': request.user.get_full_name(),
        'email': request.user.email,
        'username': request.user.username,
    })
    # 'vidzios' is not a form field; extract it before validating the form
    vidzios = data.pop('vidzios', None)
    form = DistributionRequestForm(data=data)
    if form.is_valid():
        dr = form.save()
        response['status'] = 'success'
        status_code = 200
        if vidzios:
            app_label, model = settings.COLLECTFORM_RELATED_MODEL
            ct = ContentType.objects.get_by_natural_key(app_label=app_label, model=model)
            for vidzio_id in vidzios:
                dr.vidzios.create(content_type=ct, object_id=vidzio_id)
        mail_request_to_managers(sender=None, instance=dr, created=True)
    else:
        response['errors'] = form.errors
    return Response(response, status=status_code)
| bsd-3-clause | -286,069,211,578,861,470 | 33.921053 | 149 | 0.639035 | false | 3.824207 | false | false | false |
Ernestyj/PyStudy | finance/HurstExponent.py | 1 | 5306 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.finance as mpf
import math
# Step1:时间序列分段
# 子区间长度n是可变的,如果进行回归分析需要进行将时间序列进行分段,例如若时间序列长度为240,则其可以分解成4段长度为60的等长子区间,
# 或者6段长度为40的等长子区间……
'''
输入:数据长度
输出:分段方案矩阵
'''
def getSegmentationMatrix(dataLen):
    """Compute all ways to split a series of length ``dataLen`` into equal parts.

    Only segmentations with at least 4 sub-intervals and sub-interval length
    of at least 4 are considered, i.e. divisors of ``dataLen`` between 4 and
    ``dataLen // 4``.  For example, a length of 240 can be split into 4
    segments of 60, 5 segments of 48, and so on.

    :param dataLen: total length of the time series
    :return: list of ``[m, n]`` pairs with ``m * n == dataLen``
    """
    segmentMatrix = []
    # integer division keeps the bound and the segment lengths as ints on
    # both Python 2 and Python 3 (the original float `/` only worked on Py2)
    end = dataLen // 4
    for i in range(4, end + 1):
        if dataLen % i == 0:
            segmentMatrix.append([i, dataLen // i])
    return segmentMatrix
# step2:Hurst指数计算
'''
输入:时间序列数组
输出:hurst指数值
'''
# def computeHurst1(data):
# data = np.array(data).astype('float')
# dataLen = len(data)
# segmentMatrix = getSegmentationMatrix(dataLen)
# segMethod = len(segmentMatrix)#分段方案数
# logRS = np.zeros(segMethod)
# logN = np.zeros(segMethod)
# for i in range(0, segMethod):
# dataMat = data.reshape(segmentMatrix[i])
# m = segmentMatrix[i][0]
# n = segmentMatrix[i][1]
# meanArr = dataMat.mean(axis=1)
# # 计算第a个区间的累计离差(转置)
# subMatTrans = dataMat.T-meanArr
# cumSubMat = subMatTrans.T.cumsum(axis=1)
# RVector = np.zeros(n*m).reshape(n, m)
# SVector = np.zeros(n*m).reshape(n, m)
# # 计算(R/S)n
# for j in range(n):
# RVector[j] = cumSubMat[:,:j+1].max(axis=1)-cumSubMat[:,:j+1].min(axis=1)
# SVector[j] = dataMat[:,:j+1].std(axis=1)
# logRS[i] = math.log((RVector/SVector).T.mean(axis=1).mean())
# logN[i] = math.log(n)
# return np.polyfit(logN, logRS, 1)[0]
def computeHurst(data):
    """Estimate the Hurst exponent of a time series via rescaled-range analysis.

    For every feasible equal-length segmentation of the series, the mean
    rescaled range (R/S)n is computed; log(R/S) is then regressed on log(n)
    and the slope of that fit is returned as the Hurst exponent.

    :param data: sequence of numbers (typically log returns)
    :return: estimated Hurst exponent (float)
    """
    data = np.array(data).astype('float')
    dataLen = len(data)
    segmentMatrix = getSegmentationMatrix(dataLen)
    segMethod = len(segmentMatrix)  # number of segmentation schemes
    logRS = np.zeros(segMethod)
    logN = np.zeros(segMethod)
    for i in range(0, segMethod):
        m, n = segmentMatrix[i]  # m sub-intervals of length n each
        dataMat = data.reshape(m, n)
        meanArr = dataMat.mean(axis=1)
        # cumulative deviations from each sub-interval's mean (transpose to
        # broadcast the per-row means, then transpose back)
        subMatTrans = dataMat.T - meanArr
        cumSubMat = subMatTrans.T.cumsum(axis=1)
        # R = range of the cumulative deviations, S = standard deviation
        # NOTE(review): S is zero for a constant sub-interval, which would
        # divide by zero here -- assumes non-degenerate input; confirm upstream
        RVector = cumSubMat.max(axis=1) - cumSubMat.min(axis=1)
        SVector = dataMat.std(axis=1)
        logRS[i] = math.log((RVector / SVector).mean())
        logN[i] = math.log(n)
    return np.polyfit(logN, logRS, 1)[0]
# step3:移动平均hurst指数计算
# 例如计算120个交易日的Husrt指数,使用的数据为[t-119,t]的价格数据即可,移动平均的意思为根据t的向前移动,
# 计算指数的数据[t-119,t]的价格数据同时根据t进行移动。
'''
输入:以时间为索引的Series
输出:以时间为索引的hurst Series
'''
def computeMovingHurst(dataSeries, window=120):
    """Compute a rolling Hurst exponent over ``dataSeries``.

    For each position ``t >= window`` the exponent is estimated from the log
    returns of the preceding ``window`` observations; earlier positions are
    filled with NaN.

    :param dataSeries: pandas Series of prices indexed by time
    :param window: look-back length in observations
    :return: pandas Series of Hurst exponents with the same index, or ``None``
             if the series is shorter than ``window``
    """
    dataLen = len(dataSeries)
    if dataLen < window:
        # keep the original best-effort behaviour: warn and return None
        # (print() with a single argument is valid on Python 2 and 3)
        print('window length is bigger than data length')
        return
    logPrices = np.log(dataSeries.values)
    indexReturns = np.append([0], np.diff(logPrices))
    hursts = np.zeros(dataLen)
    # np.nan instead of the removed-in-numpy-2.0 alias np.NaN
    hursts[0:window] = np.nan
    for i in range(dataLen - window):
        hursts[window + i] = computeHurst(indexReturns[i:i + window])
    return pd.Series(hursts, index=dataSeries.index)
# 计算E(H),用Peters方法计算E[(R/S)n]
'''
输入:时间序列数组
输出:hurst指数期望值
'''
def computeHurstExpecPeters(data):
    """Expected Hurst exponent E(H) for a random series of the same length.

    Uses Peters' approximation of the expected rescaled range,
    E[(R/S)n] = ((n - 0.5) / n) * (n * pi / 2) ** -0.5 * sum_{r=1}^{n-1} sqrt((n - r) / r),
    and regresses log E[(R/S)n] on log(n) over all segmentations.

    :param data: time series (only its length is used)
    :return: expected Hurst exponent (float)
    """
    dataLen = len(data)
    segmentMatrix = getSegmentationMatrix(dataLen)
    segMethod = len(segmentMatrix)  # number of segmentation schemes
    logERS = np.zeros(segMethod)
    logN = np.zeros(segMethod)
    for i in range(0, segMethod):
        n = segmentMatrix[i][1]
        # Peters' approximation of E[(R/S)n]; the summand is sqrt((n - r) / r)
        # (the original used (n - 1) / r, which does not match the formula),
        # and float() guards against Python 2 integer division
        tempSum = 0.0
        for r in range(1, n):
            tempSum += math.sqrt(float(n - r) / r)
        ERS = (n - 0.5) / n * math.pow(n * math.pi / 2, -0.5) * tempSum
        logERS[i] = math.log(ERS)
        logN[i] = math.log(n)
    return np.polyfit(logN, logERS, 1)[0]
from numpy import cumsum, log, polyfit, sqrt, std, subtract
from numpy.random import randn
def hurst(ts):
    """Return the Hurst exponent of the time series vector *ts*.

    For lags 2..99, the standard deviation of the lagged differences is
    computed; the slope of log(sqrt(std)) against log(lag) -- doubled --
    estimates the exponent.
    """
    lag_values = range(2, 100)
    taus = []
    for lag in lag_values:
        lagged_diffs = subtract(ts[lag:], ts[:-lag])
        taus.append(sqrt(std(lagged_diffs)))
    slope = polyfit(log(lag_values), log(taus), 1)[0]
    return slope * 2.0
# Create a Geometric Brownian Motion, a Mean-Reverting and a Trending series
# (random-walk, white-noise and drifting test signals for the hurst() function)
gbm = log(cumsum(randn(100000))+1000)
mr = log(randn(100000)+1000)
tr = log(cumsum(randn(100000)+1)+1000)
# Output the Hurst Exponent for each of the above series
# and the price of Google (the Adjusted Close price) for
# the ADF test given above in the article
# (Python 2 print statements, kept commented out for reference:)
# print "Hurst(GBM): %s" % hurst(gbm)
# print "Hurst(MR): %s" % hurst(mr)
# print "Hurst(TR): %s" % hurst(tr)
# Typical results: random walk ~0.5, mean-reverting ~0.0, trending ~0.95:
# Hurst(GBM): 0.500606209426
# Hurst(MR): 0.000313348900533
# Hurst(TR): 0.947502376783
ecreall/lagendacommun | lac/content/processes/film_schedule_management/definition.py | 1 | 3125 | # Copyright (c) 2014 by Ecreall under licence AGPL terms
# available on http://www.gnu.org/licenses/agpl.html
# licence: AGPL
# author: Amen Souissi
from dace.processdefinition.processdef import ProcessDefinition
from dace.processdefinition.activitydef import ActivityDefinition
from dace.processdefinition.gatewaydef import (
ExclusiveGatewayDefinition,
ParallelGatewayDefinition)
from dace.processdefinition.transitiondef import TransitionDefinition
from dace.processdefinition.eventdef import (
StartEventDefinition,
EndEventDefinition)
from dace.objectofcollaboration.services.processdef_container import (
process_definition)
from pontus.core import VisualisableElement
from .behaviors import (
AddCinemagoer,
EditFilmSchedule,
SeeFilmSchedule,
RemoveFilmSchedule)
from lac import _
@process_definition(name='filmschedulemanagement',
                    id='filmschedulemanagement')
class FilmScheduleManagement(ProcessDefinition, VisualisableElement):
    """Workflow (process) definition for managing a film's cinema schedule."""
    # only a single instance of this process definition may exist
    isUnique = True
    def __init__(self, **kwargs):
        super(FilmScheduleManagement, self).__init__(**kwargs)
        self.title = _('Film schedule management')
        self.description = _('Film schedule management')
    def _init_definition(self):
        """Declare the workflow graph.
        A start event fans out through a parallel gateway to the available
        activities (add sessions, edit, view details, remove), which all
        converge on an exclusive gateway that leads to the end event.
        """
        self.defineNodes(
            start = StartEventDefinition(),
            pg = ParallelGatewayDefinition(),
            add_cinemagoer = ActivityDefinition(contexts=[AddCinemagoer],
                                   description=_("Add cinema sessions"),
                                   title=_("Add cinema sessions"),
                                   groups=[_("Add")]),
            edit = ActivityDefinition(contexts=[EditFilmSchedule],
                                   description=_("Edit the film synopsis"),
                                   title=_("Edit"),
                                   groups=[]),
            see = ActivityDefinition(contexts=[SeeFilmSchedule],
                                   description=_("Details"),
                                   title=_("Details"),
                                   groups=[]),
            remove = ActivityDefinition(contexts=[RemoveFilmSchedule],
                                   description=_("Remove the film synopsis"),
                                   title=_("Remove"),
                                   groups=[]),
            eg = ExclusiveGatewayDefinition(),
            end = EndEventDefinition(),
        )
        self.defineTransitions(
            TransitionDefinition('start', 'pg'),
            TransitionDefinition('pg', 'add_cinemagoer'),
            TransitionDefinition('add_cinemagoer', 'eg'),
            TransitionDefinition('pg', 'see'),
            TransitionDefinition('see', 'eg'),
            TransitionDefinition('pg', 'edit'),
            TransitionDefinition('edit', 'eg'),
            TransitionDefinition('pg', 'remove'),
            TransitionDefinition('remove', 'eg'),
            TransitionDefinition('eg', 'end'),
        )
| agpl-3.0 | 676,595,774,552,225,900 | 41.808219 | 81 | 0.56384 | false | 5.139803 | false | false | false |
MariusWirtz/TM1py | TM1py/Objects/Element.py | 1 | 2041 | # -*- coding: utf-8 -*-
import collections
import json
from TM1py.Objects.TM1Object import TM1Object
from TM1py.Utils.Utils import lower_and_drop_spaces
class Element(TM1Object):
    """Abstraction of a TM1 dimension element.

    Holds the element's name, type and (optionally) its attributes, unique
    name and index, and knows how to (de)serialize itself for the REST API.
    """

    # the only element types TM1 accepts
    valid_types = ('Numeric', 'String', 'Consolidated')

    def __init__(self, name, element_type, attributes=None, unique_name=None, index=None):
        self._name = name
        self._unique_name = unique_name
        self._index = index
        self._element_type = None
        # assign through the property so the value is normalized and validated
        self.element_type = element_type
        self._attributes = attributes

    @staticmethod
    def from_dict(element_as_dict):
        """Build an :class:`Element` from a dict as returned by the TM1 REST API."""
        return Element(name=element_as_dict['Name'],
                       element_type=element_as_dict['Type'],
                       attributes=element_as_dict['Attributes'],
                       unique_name=element_as_dict['UniqueName'],
                       index=element_as_dict['Index'])

    @property
    def name(self):
        """The element's name."""
        return self._name

    @name.setter
    def name(self, value):
        self._name = value

    @property
    def unique_name(self):
        """The fully qualified element name (read-only)."""
        return self._unique_name

    @property
    def index(self):
        """The element's position within its dimension (read-only)."""
        return self._index

    @property
    def element_attributes(self):
        """The element's attributes as delivered by the REST API (read-only)."""
        return self._attributes

    @property
    def element_type(self):
        """The element's type: one of :attr:`valid_types`."""
        return self._element_type

    @element_type.setter
    def element_type(self, value):
        normalized = lower_and_drop_spaces(value).capitalize()
        if normalized not in self.valid_types:
            raise ValueError('{} is not a valid Element Type'.format(value))
        self._element_type = normalized

    @property
    def body(self):
        """The REST request body as a JSON string."""
        return json.dumps(self._construct_body())

    @property
    def body_as_dict(self):
        """The REST request body as an ordered dict."""
        return self._construct_body()

    def _construct_body(self):
        # only name and type belong in the element's REST body
        body = collections.OrderedDict()
        body['Name'] = self._name
        body['Type'] = self._element_type
        return body
erigones/esdc-ce | api/template/base/serializers.py | 1 | 6476 | from copy import deepcopy
from django.utils.translation import ugettext_lazy as _
from django.utils.six import iteritems
from api import serializers as s
from api.validators import validate_alias
from api.vm.utils import get_owners
from api.vm.define.serializers import VmDefineSerializer, KVmDefineDiskSerializer, VmDefineNicSerializer
from api.vm.snapshot.serializers import SnapshotDefineSerializer
from api.vm.backup.serializers import BackupDefineSerializer
from gui.models import User
from vms.models import VmTemplate
def create_dummy_serializer(serializer_cls, skip_fields=(), required_fields=()):
    """Convert existing serializer class into serializer that can be used as a serializer field.
    The resulting serializer is missing the original validators, and every field
    becomes optional unless it is listed in *required_fields*.
    @type serializer_cls: api.serializers.Serializer
    """
    class Serializer(s.Serializer):
        pass
    # copy (relaxed versions of) all writable fields of the source serializer
    # noinspection PyUnresolvedReferences
    for name, field in iteritems(serializer_cls.base_fields):
        if name in skip_fields or field.read_only:
            continue
        if isinstance(field, s.RelatedField):
            # related fields cannot be deep-copied usefully here -> degrade to plain text
            new_field = s.CharField()
        else:
            new_field = deepcopy(field)  # Do not touch the original field
        if name in required_fields:
            new_field.required = True
        else:
            new_field.required = False
        # noinspection PyUnresolvedReferences
        Serializer.base_fields[name] = new_field
    return Serializer
def validate_dummy_serializer(serializer, value):
    """Validate *value* with *serializer*; raise on errors or unknown fields."""
    ser = serializer(data=value)
    ser.is_valid()
    for name in ser.init_data:
        if name in ser.fields:
            continue
        # flag every submitted key that the serializer does not know about
        # noinspection PyProtectedMember
        ser._errors[name] = s.ErrorList([_('Invalid field.')])
    if ser.errors:
        raise s.NestedValidationError(ser.errors)
class _DefineField(s.DictField):
    """Dict field validated against a relaxed (dummy) serializer class."""
    # subclasses set this to a class created by create_dummy_serializer()
    _serializer = None
    def validate(self, value):
        validate_dummy_serializer(self._serializer, value)
class VmDefineField(_DefineField):
    """Dict field holding a relaxed VM definition (VmDefineSerializer payload)."""
    _serializer = create_dummy_serializer(VmDefineSerializer)
class _DefineArrayField(s.DictArrayField):
    """Array-of-dicts field; every item is validated against a dummy serializer."""
    # subclasses set this to a class created by create_dummy_serializer()
    _serializer = None
    def validate(self, value):
        super(_DefineArrayField, self).validate(value)
        for i in value:
            validate_dummy_serializer(self._serializer, i)
class VmDefineDiskField(_DefineArrayField):
    """Array field of relaxed VM disk definitions."""
    _serializer = create_dummy_serializer(KVmDefineDiskSerializer)
class VmDefineNicField(_DefineArrayField):
    """Array field of relaxed VM NIC definitions."""
    _serializer = create_dummy_serializer(VmDefineNicSerializer)
class VmDefineSnapshotField(_DefineArrayField):
    """Array field of relaxed snapshot definitions; 'name' stays mandatory."""
    _serializer = create_dummy_serializer(SnapshotDefineSerializer, required_fields=('name',))
class VmDefineBackupField(_DefineArrayField):
    """Array field of relaxed backup definitions; 'name' stays mandatory."""
    _serializer = create_dummy_serializer(BackupDefineSerializer, required_fields=('name',))
class TemplateSerializer(s.ConditionalDCBoundSerializer):
    """
    vms.models.Template

    Serializer for server templates. The ``vm_define*`` fields hold relaxed
    (validator-free) copies of the VM definition payloads.
    """
    _model_ = VmTemplate
    _update_fields_ = ('alias', 'owner', 'access', 'desc', 'ostype', 'dc_bound', 'vm_define',
                       'vm_define_disk', 'vm_define_nic', 'vm_define_snapshot', 'vm_define_backup')
    _default_fields_ = ('name', 'alias', 'owner')
    _null_fields_ = frozenset({'ostype', 'vm_define', 'vm_define_disk',
                               'vm_define_nic', 'vm_define_snapshot', 'vm_define_backup'})
    name = s.RegexField(r'^[A-Za-z0-9][A-Za-z0-9\._-]*$', max_length=32)
    alias = s.SafeCharField(max_length=32)
    owner = s.SlugRelatedField(slug_field='username', queryset=User.objects, required=False)
    access = s.IntegerChoiceField(choices=VmTemplate.ACCESS, default=VmTemplate.PRIVATE)
    desc = s.SafeCharField(max_length=128, required=False)
    ostype = s.IntegerChoiceField(choices=VmTemplate.OSTYPE, required=False, default=None)
    vm_define = VmDefineField(default={}, required=False)
    vm_define_disk = VmDefineDiskField(default=[], required=False, max_items=2)
    vm_define_nic = VmDefineNicField(default=[], required=False, max_items=4)
    vm_define_snapshot = VmDefineSnapshotField(default=[], required=False, max_items=16)
    vm_define_backup = VmDefineBackupField(default=[], required=False, max_items=16)
    created = s.DateTimeField(read_only=True, required=False)
    def __init__(self, request, tmp, *args, **kwargs):
        super(TemplateSerializer, self).__init__(request, tmp, *args, **kwargs)
        if not kwargs.get('many', False):
            # remember the template's DC binding for _normalize()/validate()
            self._dc_bound = tmp.dc_bound
        self.fields['owner'].queryset = get_owners(request, all=True)
    def _normalize(self, attr, value):
        # dc_bound is taken from the stored binding, not from user input
        if attr == 'dc_bound':
            return self._dc_bound
        # noinspection PyProtectedMember
        return super(TemplateSerializer, self)._normalize(attr, value)
    def validate_alias(self, attrs, source):
        # only validate the alias if it was actually submitted
        try:
            value = attrs[source]
        except KeyError:
            pass
        else:
            validate_alias(self.object, value)
        return attrs
    def validate(self, attrs):
        # enforce the per-DC template limit when creating a DC-bound template
        if self.request.method == 'POST' and self._dc_bound:
            limit = self._dc_bound.settings.VMS_TEMPLATE_LIMIT
            if limit is not None:
                if VmTemplate.objects.filter(dc_bound=self._dc_bound).count() >= int(limit):
                    raise s.ValidationError(_('Maximum number of server templates reached.'))
        # fall back to the stored values for fields missing from the request
        try:
            ostype = attrs['ostype']
        except KeyError:
            ostype = self.object.ostype
        try:
            vm_define = attrs['vm_define']
        except KeyError:
            vm_define = self.object.vm_define
        vm_define_ostype = vm_define.get('ostype', None)
        # The template object itself has an ostype field, which is used to limit the use of a template on the DB level;
        # However, also the template.vm_define property can have an ostype attribute, which will be used for a new VM
        # (=> will be inherited from the template). A different ostype in both places will lead to strange situations
        # (e.g. using a Windows template, which will create a Linux VM). Therefore we have to prevent such situations.
        if vm_define_ostype is not None and ostype != vm_define_ostype:
            raise s.ValidationError('Mismatch between vm_define ostype and template ostype.')
        return super(TemplateSerializer, self).validate(attrs)
class ExtendedTemplateSerializer(TemplateSerializer):
    """Template serializer that additionally exposes the list of attached DCs."""
    dcs = s.DcsField()
| apache-2.0 | -164,667,068,585,603,100 | 36.433526 | 119 | 0.677424 | false | 4.009907 | false | false | false |
DamnWidget/mamba | mamba/test/dummy_app/twisted/plugins/dummy_plugin.py | 3 | 1260 | from zope.interface import implements
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from mamba.utils import config
from dummy import MambaApplicationFactory
settings = config.Application('config/application.json')
class Options(usage.Options):
    """Command-line options understood by the mamba twistd plugin."""
    optParameters = [
        ['port', 'p', settings.port, 'The port number to listen on']
    ]
class MambaServiceMaker(object):
    """Twisted service maker that exposes the mamba application as a tap."""
    implements(IServiceMaker, IPlugin)
    # tap name and description come from the application's JSON config
    tapname = settings.name
    description = settings.description
    options = Options
    def makeService(self, options):
        """Construct a TCPServer service from the mamba application factory.
        """
        factory, application = MambaApplicationFactory(settings)
        httpserver = internet.TCPServer(int(options['port']), factory)
        httpserver.setName('{} Application'.format(settings.name))
        # attach the HTTP server to the application so twistd manages it
        httpserver.setServiceParent(application)
        return httpserver
# Now construct an object which *provides* the relevant interfaces
# The name of this variable is irrelevant, as long as there is *some*
# name bound to a provider of IPlugin and IServiceMaker
mamba_service_maker = MambaServiceMaker()
| gpl-3.0 | -5,399,313,289,844,860,000 | 29.731707 | 70 | 0.743651 | false | 4.315068 | false | false | false |
Petr-By/qtpyvis | qtgui/panels/advexample.py | 1 | 8103 | """
File: logging.py
Author: Ulf Krumnack
Email: krumnack@uni-osnabrueck.de
Github: https://github.com/krumnack
"""
# standard imports
from base import Runner
from toolbox import Toolbox
# Qt imports
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import (QPushButton, QSpinBox,
QVBoxLayout, QHBoxLayout)
# toolbox imports
from toolbox import Toolbox
from dltb.base.data import Data
from dltb.base.image import Image, Imagelike
# GUI imports
from .panel import Panel
from ..utils import QObserver
from ..widgets.matplotlib import QMatplotlib
from ..widgets.training import QTrainingBox
from ..widgets.data import QDataSelector
class AdversarialExamplePanel(Panel, QObserver, qobservables={
        Toolbox: {'input_changed'}}):
    """A panel displaying adversarial examples.
    Attributes
    ----------
    _controller: AdversarialExampleController
        Controller used to create and inspect adversarial examples
        (legacy interface, see the FIXME[old] markers below).
    """
    def __init__(self, toolbox: Toolbox = None, **kwargs):
        """Initialization of the AdversarialExamplePanel.
        Parameters
        ----------
        toolbox: Toolbox
            The toolbox providing the input data; may be None.
        **kwargs
            Forwarded to the QWidget constructor.
        """
        super().__init__(**kwargs)
        self._controller = None  # FIXME[old]
        self._initUI()
        self._layoutUI()
        self.setToolbox(toolbox)
        # FIXME[old]
        # self.setController(AdversarialExampleController())
    def _initUI(self):
        """Initialize the user interface.
        The user interface contains the following elements:
        * the data selector: depicting the current input image
          and allowing to select new inputs from a datasource
        * output: adversarial example
        * output: adversarial perturbation
        * output: statistics
        """
        #
        # Input data
        #
        self._dataSelector = QDataSelector()
        self._dataView = self._dataSelector.dataView()
        self._dataView.addAttribute('filename')
        self._dataView.addAttribute('basename')
        self._dataView.addAttribute('directory')
        self._dataView.addAttribute('path')
        self._dataView.addAttribute('regions')
        self._dataView.addAttribute('image')
        #
        # Controls
        #
        self._buttonCreateModel = QPushButton("Create")
        self._buttonTrainModel = QPushButton("Train")
        self._buttonLoadModel = QPushButton("Load")
        self._buttonSaveModel = QPushButton("Save")
        self._buttonResetModel = QPushButton("Reset")
        self._buttonPlotModel = QPushButton("Plot Model")
        self._buttonShowExample = QPushButton("Show")
        self._buttonShowExample.clicked.connect(self._onShowExample)
        #
        # Plots
        #
        self._trainingBox = QTrainingBox()
        self._pltOriginal = QMatplotlib()
        self._pltAdversarial = QMatplotlib()
    def _layoutUI(self):
        """Layout the UI elements.
        """
        # The big picture:
        #
        # +--------------------+----------------------------------------+
        # |+------------------+|+------------------------------------+  |
        # ||dataSelector      ||| Result                             |  |
        # ||[view]            ||| (Adversarial Example)              |  |
        # ||                  |||                                    |  |
        # ||                  |||                                    |  |
        # ||                  ||| Diffs                              |  |
        # ||                  ||| (Adversarial Perturbation)         |  |
        # ||[navigator]       ||| Statistics                         |  |
        # ||                  |||                                    |  |
        # ||                  ||| Selector                           |  |
        # |+------------------+|+------------------------------------+  |
        # +--------------------+----------------------------------------+
        plotBar = QHBoxLayout()
        plotBar.addWidget(self._dataSelector)
        plotBar.addWidget(self._trainingBox)
        plotBar.addWidget(self._pltOriginal)
        plotBar.addWidget(self._pltAdversarial)
        buttonBar = QHBoxLayout()
        buttonBar.addWidget(self._buttonCreateModel)
        buttonBar.addWidget(self._buttonTrainModel)
        buttonBar.addWidget(self._buttonLoadModel)
        buttonBar.addWidget(self._buttonSaveModel)
        buttonBar.addWidget(self._buttonResetModel)
        buttonBar.addWidget(self._buttonPlotModel)
        buttonBar.addWidget(self._buttonShowExample)
        layout = QVBoxLayout()
        layout.addLayout(plotBar)
        layout.addLayout(buttonBar)
        self.setLayout(layout)
    def setImage(self, image: Imagelike) -> None:
        """Set the image for this :py:class:`AdversarialExamplePanel`. This
        will initiate the processing of this image using the
        current tools.
        """
        self.setData(Image.as_data(image))
    def setData(self, data: Data) -> None:
        """Set the data to be processed by this :py:class:`AdversarialExamplePanel`.
        """
        # set data for the dataView - this is redundant if data is set
        # from the toolbox (as the dataView also observes the toolbox),
        # but it is necessary, if setData is called independently.
        self._dataView.setData(data)
        # FIXME[todo]: generate adversarial example.
    def setToolbox(self, toolbox: Toolbox) -> None:
        """Set a new Toolbox.
        We are only interested in changes of the input data.
        """
        self._dataSelector.setToolbox(toolbox)
        # self._dataView.setToolbox(toolbox)
        self.setData(toolbox.input_data if toolbox is not None else None)
    def toolbox_changed(self, toolbox: Toolbox,
                        change: Toolbox.Change) -> None:
        # pylint: disable=invalid-name
        """React to Toolbox changes. This panel is a Toolbox.Observer that is
        interested in input changes and updates the displayed data whenever
        a new input image is selected.
        """
        if change.input_changed:
            self.setData(toolbox.input_data)
    # FIXME[old]
    # FIXME[hack]: no quotes!
    def setController(self, controller: 'AdversarialExampleController') -> None:
        # legacy wiring of the control buttons to the controller's actions
        self._controller = controller
        self._buttonCreateModel.clicked.connect(controller.create_model)
        self._buttonTrainModel.clicked.connect(controller.train_model)
        self._buttonLoadModel.clicked.connect(controller.load_model)
        self._buttonSaveModel.clicked.connect(controller.save_model)
        self._buttonResetModel.clicked.connect(controller.reset_model)
        self.observe(controller)
    def _enableComponents(self, running=False):
        # disable the model-related buttons while a computation is running
        print(f"enable components: {running}")
        available = self._controller is not None and not running
        self._buttonCreateModel.setEnabled(not running)
        for w in (self._buttonTrainModel,
                  self._buttonLoadModel, self._buttonSaveModel,
                  self._buttonPlotModel,
                  self._buttonShowExample):
            w.setEnabled(available)
    def _onShowExample(self):
        # plot the original example and its adversarial counterpart side by side
        if self._controller is None:
            self._pltOriginal.noData()
            self._pltAdversarial.noData()
        else:
            example_data, example_label, example_prediction = \
                self._controller.get_example()
            with self._pltOriginal as ax:
                ax.imshow(example_data[:,:,0], cmap='Greys_r')
                ax.set_title(f"Label = {example_label.argmax()}, "
                             f"Prediction = {example_prediction.argmax()}")
            adversarial_data, adversarial_prediction = \
                self._controller.get_adversarial_example()
            with self._pltAdversarial as ax:
                ax.imshow(adversarial_data[:,:,0], cmap='Greys_r')
                ax.set_title(f"Prediction = {adversarial_prediction.argmax()}")
    def adversarialControllerChanged(self, controller, change):
        # legacy observer callback for the adversarial example controller
        if 'busy_changed' in change:
            self._enableComponents(controller.busy)
| mit | -4,019,713,716,060,868,000 | 36.688372 | 80 | 0.569049 | false | 4.316995 | false | false | false |
SistemasAgiles/pyafipws.simulador | models/db.py | 2 | 3441 | # -*- coding: utf-8 -*-
#########################################################################
## This scaffolding model makes your app work on Google App Engine too
#########################################################################
if request.env.web2py_runtime_gae:            # if running on Google App Engine
    db = DAL('gae')                           # connect to Google BigTable
    session.connect(request, response, db=db) # and store sessions and tickets there
    ### or use the following lines to store sessions in Memcache instead
    # from gluon.contrib.memdb import MEMDB
    # from google.appengine.api.memcache import Client
    # session.connect(request, response, db=MEMDB(Client()))
else:                                         # else use a normal relational database
    db = DAL('sqlite://storage.sqlite')       # if not, use SQLite or other DB
## if no need for session
# session.forget()
#########################################################################
## Here is sample code if you need for
## - email capabilities
## - authentication (registration, login, logout, ... )
## - authorization (role based authorization)
## - services (xml, csv, json, xmlrpc, jsonrpc, amf, rss)
## - crud actions
## comment/uncomment as needed
from gluon.tools import *
auth=Auth(globals(),db)                      # authentication/authorization
# NOTE(review): hard-coded HMAC key committed to source control -- it should be
# rotated and loaded from a private configuration file instead
auth.settings.hmac_key='sha512:e59112ff-dd3e-4575-88ce-12550860d018'
auth.define_tables()                         # creates all needed tables
crud=Crud(globals(),db)                      # for CRUD helpers using auth
service=Service(globals())                   # for json, xml, jsonrpc, xmlrpc, amfrpc
# crud.settings.auth=auth # enforces authorization on crud
# mail=Mail() # mailer
# mail.settings.server='smtp.gmail.com:587' # your SMTP server
# mail.settings.sender='you@gmail.com' # your email
# mail.settings.login='username:password' # your credentials or None
# auth.settings.mailer=mail # for user email verification
# auth.settings.registration_requires_verification = True
# auth.settings.registration_requires_approval = True
# auth.messages.verify_email = 'Click on the link http://'+request.env.http_host+URL(r=request,c='default',f='user',args=['verify_email'])+'/%(key)s to verify your email'
# auth.settings.reset_password_requires_verification = True
# auth.messages.reset_password = 'Click on the link http://'+request.env.http_host+URL(r=request,c='default',f='user',args=['reset_password'])+'/%(key)s to reset your password'
## more options discussed in gluon/tools.py
#########################################################################
#########################################################################
## Define your tables below, for example
##
## >>> db.define_table('mytable',Field('myfield','string'))
##
## Fields can be 'string','text','password','integer','double','boolean'
## 'date','time','datetime','blob','upload', 'reference TABLENAME'
## There is an implicit 'id integer autoincrement' field
## Consult manual for more options, validators, etc.
##
## More API examples for controllers:
##
## >>> db.mytable.insert(myfield='value')
## >>> rows=db(db.mytable.myfield=='value').select(db.mytable.ALL)
## >>> for row in rows: print row.id, row.myfield
#########################################################################
| agpl-3.0 | -8,921,984,654,793,008,000 | 52.765625 | 176 | 0.57367 | false | 4.170909 | false | false | false |
danaukes/popupcad | popupcad/graphics2d/interactivevertex.py | 2 | 2746 | # -*- coding: utf-8 -*-
"""
Written by Daniel M. Aukes and CONTRIBUTORS
Email: danaukes<at>asu.edu.
Please see LICENSE for full license.
"""
import qt.QtCore as qc
import qt.QtGui as qg
#import popupcad.graphics2d.modes as modes
#from popupcad.graphics2d.graphicsitems import Common
from popupcad.graphics2d.interactivevertexbase import InteractiveVertexBase
class InteractiveVertex(InteractiveVertexBase):
    """Draggable vertex belonging to an interactive parent object.

    ``connectedinteractive`` is the parent interactive (set via
    :meth:`setconnection`); it decides whether the vertex may be dragged
    and is notified when the vertex is removed.
    """
    radius = 10
    # z-values used to raise the vertex above its parent while hovered
    z_below = 100
    z_above = 105
    def __init__(self, *args, **kwargs):
        super(InteractiveVertex, self).__init__(*args, **kwargs)
        self.connectedinteractive = None
    def setconnection(self, connectedinteractive):
        # link this vertex to its parent interactive object
        self.connectedinteractive = connectedinteractive
    def hoverEnterEvent(self, event):
        qg.QGraphicsEllipseItem.hoverEnterEvent(self, event)
        if self.connectedinteractive is not None:
            self.setZValue(self.z_above)
        self.updatestate(self.states.state_hover)
    def hoverLeaveEvent(self, event):
        qg.QGraphicsEllipseItem.hoverLeaveEvent(self, event)
        self.setZValue(self.z_below)
        self.updatestate(self.states.state_neutral)
    def mouseMoveEvent(self, event):
        # only forward drags while the parent interactive is in edit mode
        if self.connectedinteractive.mode is not None:
            if self.connectedinteractive.mode == self.connectedinteractive.modes.mode_edit:
                super(InteractiveVertex, self).mouseMoveEvent(event)
    def mousePressEvent(self, event):
        modifiers = int(event.modifiers())
        shiftpressed = modifiers & qc.Qt.ShiftModifier
        ctrlpressed = modifiers & qc.Qt.ControlModifier
        # ctrl+shift+click removes the vertex instead of selecting it
        remove = ctrlpressed and shiftpressed
        if remove:
            if self.connectedinteractive is not None:
                self.connectedinteractive.removevertex(self)
            self.removefromscene()
        else:
            super(InteractiveVertex, self).mousePressEvent(event)
class InteractiveShapeVertex(InteractiveVertex):
    """Interactive vertex used by editable shapes (same behaviour as the base)."""
    radius = 10
    z_below = 100
    z_above = 105
class ReferenceInteractiveVertex(InteractiveVertex):
    """Interactive vertex used for reference geometry (same behaviour as the base)."""
    radius = 10
    z_below = 100
    z_above = 105
class DrawingPoint(InteractiveVertexBase):
    """Stand-alone, deletable drawing point (not attached to a shape)."""
    isDeletable = True
    radius = 5
    # drawn just above plain items but below hovered vertices
    z_below = 101
    z_above = 105
    def __init__(self, *args, **kwargs):
        super(DrawingPoint, self).__init__(*args, **kwargs)
    def refreshview(self):
        # nothing to refresh: appearance is fully handled by the base class
        pass
    def copy(self):
        """Return a new interactive point built from a copy of this point's generic data."""
        genericcopy = self.get_generic().copy(identical=False)
        return genericcopy.outputinteractive()
class StaticDrawingPoint(InteractiveVertexBase):
    """Non-deletable drawing point for static (display-only) geometry."""
    radius = 5
    z_below = 100
    z_above = 105
    def __init__(self, *args, **kwargs):
        super(StaticDrawingPoint, self).__init__(*args, **kwargs)
    def refreshview(self):
        # nothing to refresh: appearance is fully handled by the base class
        pass
| mit | 5,964,751,136,130,698,000 | 27.905263 | 91 | 0.676621 | false | 3.956772 | false | false | false |
ptaq666/Mechatronika_2015-16_Proj_TN | Piatkowski/Update 2 - 18.05.2016 - program w pythonie.py | 1 | 1295 | from picamera.array import PiRGBArray
from picamera import PiCamera
import time
import cv2
import numpy as np
# Initialize the camera and get a reference to the raw capture stream.
camera = PiCamera()
camera.resolution = (640, 480)
camera.framerate = 32
rawCapture = PiRGBArray(camera, size=(640, 480))

# Give the camera a moment to warm up.
time.sleep(0.1)

# The detector parameters do not change between frames, so build the blob
# detector and the SURF extractor once instead of re-creating them for
# every captured frame (they were previously constructed inside the loop).
params = cv2.SimpleBlobDetector_Params()
# params.minThreshold = 150
# params.maxThreshold = 255
params.filterByColor = True
params.blobColor = 255
detector = cv2.SimpleBlobDetector_create(params)
surf = cv2.xfeatures2d.SURF_create(400)

# Capture frames continuously from the camera.
for frame in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
    image = frame.array

    # Blob detection; note that drawKeypoints() also draws into `image`
    # in place, because it is passed as the output image as well.
    keypoints = detector.detect(image)
    im_with_keypoints = cv2.drawKeypoints(image, keypoints, image, (0,0,255) , cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)

    # SURF keypoints and descriptors (computed but not displayed).
    kp, des = surf.detectAndCompute(image, None)

    # Show the annotated frame.
    cv2.imshow("Keypoints", im_with_keypoints)
    key = cv2.waitKey(1) & 0xFF

    # Clear the stream in preparation for the next frame.
    rawCapture.truncate(0)

    # Press 'q' to quit.
    if key == ord("q"):
        break
johankaito/fufuka | microblog/flask/venv/lib/python2.7/site-packages/celery/worker/autoscale.py | 5 | 4864 | # -*- coding: utf-8 -*-
"""
celery.worker.autoscale
~~~~~~~~~~~~~~~~~~~~~~~
This module implements the internal thread responsible
for growing and shrinking the pool according to the
current autoscale settings.
The autoscale thread is only enabled if :option:`--autoscale`
has been enabled on the command-line.
"""
from __future__ import absolute_import
import os
import threading
from time import sleep
from kombu.async.semaphore import DummyLock
from celery import bootsteps
from celery.five import monotonic
from celery.utils.log import get_logger
from celery.utils.threads import bgThread
from . import state
from .components import Pool
__all__ = ['Autoscaler', 'WorkerComponent']
logger = get_logger(__name__)
debug, info, error = logger.debug, logger.info, logger.error
AUTOSCALE_KEEPALIVE = float(os.environ.get('AUTOSCALE_KEEPALIVE', 30))
class WorkerComponent(bootsteps.StartStopStep):
label = 'Autoscaler'
conditional = True
requires = (Pool, )
def __init__(self, w, **kwargs):
self.enabled = w.autoscale
w.autoscaler = None
def create(self, w):
scaler = w.autoscaler = self.instantiate(
w.autoscaler_cls,
w.pool, w.max_concurrency, w.min_concurrency,
worker=w, mutex=DummyLock() if w.use_eventloop else None,
)
return scaler if not w.use_eventloop else None
def register_with_event_loop(self, w, hub):
w.consumer.on_task_message.add(w.autoscaler.maybe_scale)
hub.call_repeatedly(
w.autoscaler.keepalive, w.autoscaler.maybe_scale,
)
class Autoscaler(bgThread):
def __init__(self, pool, max_concurrency,
min_concurrency=0, worker=None,
keepalive=AUTOSCALE_KEEPALIVE, mutex=None):
super(Autoscaler, self).__init__()
self.pool = pool
self.mutex = mutex or threading.Lock()
self.max_concurrency = max_concurrency
self.min_concurrency = min_concurrency
self.keepalive = keepalive
self._last_action = None
self.worker = worker
assert self.keepalive, 'cannot scale down too fast.'
def body(self):
with self.mutex:
self.maybe_scale()
sleep(1.0)
def _maybe_scale(self, req=None):
procs = self.processes
cur = min(self.qty, self.max_concurrency)
if cur > procs:
self.scale_up(cur - procs)
return True
elif cur < procs:
self.scale_down((procs - cur) - self.min_concurrency)
return True
def maybe_scale(self, req=None):
if self._maybe_scale(req):
self.pool.maintain_pool()
def update(self, max=None, min=None):
with self.mutex:
if max is not None:
if max < self.max_concurrency:
self._shrink(self.processes - max)
self.max_concurrency = max
if min is not None:
if min > self.min_concurrency:
self._grow(min - self.min_concurrency)
self.min_concurrency = min
return self.max_concurrency, self.min_concurrency
def force_scale_up(self, n):
with self.mutex:
new = self.processes + n
if new > self.max_concurrency:
self.max_concurrency = new
self.min_concurrency += 1
self._grow(n)
def force_scale_down(self, n):
with self.mutex:
new = self.processes - n
if new < self.min_concurrency:
self.min_concurrency = max(new, 0)
self._shrink(min(n, self.processes))
def scale_up(self, n):
self._last_action = monotonic()
return self._grow(n)
def scale_down(self, n):
if n and self._last_action and (
monotonic() - self._last_action > self.keepalive):
self._last_action = monotonic()
return self._shrink(n)
def _grow(self, n):
info('Scaling up %s processes.', n)
self.pool.grow(n)
self.worker.consumer._update_prefetch_count(n)
def _shrink(self, n):
info('Scaling down %s processes.', n)
try:
self.pool.shrink(n)
except ValueError:
debug("Autoscaler won't scale down: all processes busy.")
except Exception as exc:
error('Autoscaler: scale_down: %r', exc, exc_info=True)
self.worker.consumer._update_prefetch_count(-n)
def info(self):
return {'max': self.max_concurrency,
'min': self.min_concurrency,
'current': self.processes,
'qty': self.qty}
@property
def qty(self):
return len(state.reserved_requests)
@property
def processes(self):
return self.pool.num_processes
| apache-2.0 | 8,019,987,817,766,772,000 | 29.024691 | 70 | 0.590872 | false | 3.928918 | false | false | false |
Naoto-Imamachi/MIRAGE | scripts/mirage_run.py | 1 | 4126 | #!/usr/bin/env python
"""
MIRAGE: Comprehensive miRNA target prediction pipeline.
Created by Naoto Imamachi on 2015-04-23.
Copyright (c) 2015 Naoto Imamachi. All rights reserved.
Updated and maintained by Naoto Imamachi since Apr 2015.
Usage:
mirage.py <analysis_type> <miRNA.fasta> <targetRNA.fasta> [options]
"""
import os, sys
import argparse
import runpy
import utils.setting_utils as utils
from parameter.common_parameters import common_parameters
def greeting(parser=None):
print ("MIRAGE v.0.1.0-beta - Comprehensive miRNA target prediction pipeline")
print ("-" * 20)
if parser is not None:
parser.print_help()
def main():
parser = argparse.ArgumentParser(prog='mirage',description='MIRAGE - Comprehensive miRNA target prediction pipeline')
parser.add_argument('analysis_type',action='store',help='Analysis_type: Choose estimation or prediction',choices=['estimation','prediction'])
parser.add_argument('mirna_fasta',action='store',help='miRNA fasta file: Specify miRNA fasta file to use the analysis')
parser.add_argument('targetrna_fasta',action='store',help='TargetRNA fasta file: Specify TargetRNA fasta file to use the analysis')
parser.add_argument('-m','--mirna-conservation-score-file',action='store',dest='mirna_conservation',help='Conservation score file about miRNA: Specify your conservation score db file. MIRAGE preparetion toolkits enables you to make the score files about TargetRNA or miRNA bed files.')
parser.add_argument('-t','--targetrna-conservation-score-file',action='store',dest='targetrna_conservation',help='Conservation score file about TargetRNA: Specify your conservation score db file. MIRAGE preparetion toolkits enables you to make the score files about TargetRNA or miRNA bed files.')
args = parser.parse_args()
#Start analysis - logging
greeting()
utils.now_time("MIRAGE miRNA target prediction starting...")
analysis_type = args.analysis_type
mirna_fasta_path = args.mirna_fasta
targetrna_fasta_path = args.targetrna_fasta
mirna_conservation_score = args.mirna_conservation
targetrna_conservation_score = args.targetrna_conservation
#Check fasta files
if not os.path.isfile(mirna_fasta_path):
print ("Error: miRNA fasta file does not exist...")
sys.exit(1)
if not os.path.isfile(targetrna_fasta_path):
print ("Error: TargetRNA fasta file does not exist...")
#Check conservation score db files
#if
#parameters
param = dict(
MIRNA_FASTA_PATH = mirna_fasta_path,
TARGETRNA_FASTA_PATH = targetrna_fasta_path,
)
common_parameters.update(param)
p = utils.Bunch(common_parameters)
print ('miRNA_Fasta_file: ' + p.MIRNA_FASTA_PATH,end="\n")
print ('TargetRNA_Fasta_file: ' + p.TARGETRNA_FASTA_PATH,end="\n")
'''
mirna_dict = utils.load_fasta(mirna_fasta_path)
#print (mirna_dict['hsa-miR-34b-5p|MIMAT0000685'],end="\n")
#print (mirna_dict['hsa-miR-20a-5p|MIMAT0000075'],end="\n")
targetrna_dict = utils.load_fasta(targetrna_fasta_path)
#print (targetrna_dict['NM_000594'],end="\n")
#print (targetrna_dict['NM_030938'],end="\n")
query_mirna.update(mirna_dict)
print (query_mirna)
mirna = utils.Bunch(query_mirna)
query_targetrna.update(targetrna_dict)
targetrna = utils.Bunch(query_targetrna)
if hasattr (mirna,'hsa-miR-34b-5p|MIMAT0000685'):
print ("OK!!")
print (mirna.items())
sys.exit(0)
else:
print ("Error...")
sys.exit(1)
#test = targetrna.'NM_000594'
#print (test,end="\n")
#sys.exit(0)
'''
#runpy - choose analysis type
if analysis_type == 'estimation':
runpy.run_module('module.estimate',run_name="__main__",alter_sys=True)
elif analysis_type == 'prediction':
runpy.run_module('module.predict',run_name="__main__",alter_sys=True)
else:
print ('Error: Analysis type is wrong...')
sys.exit(1)
if __name__ == '__main__':
main()
| mit | -6,552,360,622,212,059,000 | 38.851485 | 301 | 0.672079 | false | 3.359935 | false | false | false |
Jordan-Zhu/EdgeSegmentFitting | Unit_Tests/lineseg_unit_test.py | 1 | 4388 | import cv2
import numpy as np
from lineseg import lineseg
from drawedgelist import drawedgelist
def find_contours(im):
# im = cv2.imread('circle.png')
imgray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
ret, thresh = cv2.threshold(imgray, 127, 255, 0)
contours, hierarchy = cv2.findContours(thresh, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_SIMPLE)
# cv2.RETR_EXTERNAL cv2.RETR_CCOMP
# show contours
print contours
print hierarchy
# Just consider the contours that don't have a child
# that is hierarchy[i][2] < 0
# print hierarchy[0][1, 2]
print contours[0]
newcontours = []
for i in xrange(len(contours)):
print hierarchy[0][i, 2]
if hierarchy[0][i, 2] < 0:
print hierarchy[0][i, 2]
newcontours.append(contours[i])
cv2.drawContours(im, newcontours, 2, (0, 255, 0), 1)
contours = newcontours
# Display the image.
cv2.imshow("window", im)
cv2.waitKey(0)
cv2.destroyAllWindows()
return contours
if __name__ == '__main__':
img = cv2.imread('canny_img2.png')
data = np.asarray(find_contours(img))
# print 'data shape ', data.shape[0]
seglist = lineseg(data, tol=2)
# ValueError: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()
# print seglist
# for index, item in enumerate(seglist):
# print index
drawedgelist(seglist, rowscols=[480, 640])
# for i in seglist[0][:, 0]:
# x.append(seglist[0][i, 0])
# x = seglist[1][:, 0]
# y = seglist[1][:, 1]
# print 'x ', x[0]
# print 'y ', seglist[0][:, 1]
# for n in range(x.shape[0] - 1):
# cv2.line(img, (x[n], y[n]), (x[n + 1], y[n + 1]), (0, 255, 255), thickness=2)
# plt.plot(x[n], y[n])
# plt.hold('on')
# plt.show()
# cv2.imshow("window", img)
# cv2.waitKey(0)
# cv2.destroyAllWindows()
# reshape array
# temp = []
# for i in xrange(data.shape[0]):
# arr = np.squeeze(data[i])
# temp.append(arr)
# temp = np.asarray(temp)
# print 'x ', temp[0][:, 0]
# print 'y ', temp[0][:, 1]
# rebuilt = np.concatenate((data[0], data[1]), axis=0)
# print 'new shape ', rebuilt.shape
# y = len(test)
# test = np.resize(test, (lent, y, 1, 2))
# print data
# Input cell array of edgelists and tolerance.
# seglist = lineseg(data, len, tol=2)
# print seglist.dtype
# colon indicates to go through all the elements in this dimension.
# x = seglist[0, :, 0]
# y = seglist[0, :, 1]
# print 'x ', data.shape
# print 'y ', y
# print y.shape[0]
# pts = np.asarray([x, y], dtype=np.int32)
# print pts.dtype
# cv2.polylines(img, [x, y], False, (0, 255, 255))
# list = np.asarray(seglist[0], dtype=np.int32)
# print list.shape
# print seglist[0]
# seglist = seglist.reshape((-1, 1, 2))
# print 'seglist: ', seglist.shape[0]
# print 'seglist shape: ', type(seglist[0])
# draw = drawedgelist(list, rowscols=[480, 640])
# edgelist = np.reshape(edgelist, (edgelist.shape[0], -1, 1, 2))
# num_edges = edgelist.shape[1]
# print 'edgelist shape', edgelist.shape, ' length ', num_edges
# edgelist = np.expand_dims(edgelist, axis = 1)
# print 'length = ', edgelist.shape[0]
# print 'edgelist shape = ', edgelist.shape
# edgelist = find_contours()
# edgelist = np.asarray(find_contours(img))
# y = np.concatenate(edgelist, axis=0)
# y = np.expand_dims(y, axis=0)
# edgelist = np.reshape(edgelist, (-1, 1, 2))
# print 'shape = ', y
# print y[:, 0, 1]
# shape (num arrays, length of array in total, num rows = 1, num cols = 2)
# seglist = []
# print edgelist
# print edgelist[0, 1, 0, 0]
# print edgelist[0, 0, 0, 0]
# x = np.empty(num_edges)
# y = np.empty(num_edges)
# np.copyto(x, edgelist[0, :, 0, 0])
# np.copyto(y, edgelist[0, :, 0, 1])
#
# z = [1, 2, 3, 4, 5]
# print len(z)
# print z.index(5)
# seglist.append([z[4], z[4]])
# print seglist
# seglist.append([x[0:4], y[0:4]])
# print seglist
# print 'x coordinates'
# for i in xrange(0, num_edges):
# x[i] = edgelist[0, i, 0, 0]
#
# print 'y coordinates'
# for j in xrange(0, num_edges):
# print edgelist[0, j, 0, 1]
# print 'x = ', x | agpl-3.0 | -2,009,959,262,978,577,000 | 27.134615 | 109 | 0.567001 | false | 2.883049 | false | false | false |
Iristyle/ChocolateyPackages | EthanBrown.SublimeText2.GitPackages/tools/PackageCache/Git/flow.py | 3 | 3101 | import sublime
from git import GitWindowCommand
class GitFlowCommand(GitWindowCommand):
def is_visible(self):
s = sublime.load_settings("Git.sublime-settings")
if s.get('flow'):
return True
class GitFlowFeatureStartCommand(GitFlowCommand):
def run(self):
self.get_window().show_input_panel('Enter Feature Name:', '', self.on_done, None, None)
def on_done(self, feature_name):
self.run_command(['git-flow', 'feature', 'start', feature_name])
class GitFlowFeatureFinishCommand(GitFlowCommand):
def run(self):
self.run_command(['git-flow', 'feature'], self.feature_done)
def feature_done(self, result):
self.results = result.rstrip().split('\n')
self.quick_panel(self.results, self.panel_done,
sublime.MONOSPACE_FONT)
def panel_done(self, picked):
if 0 > picked < len(self.results):
return
picked_feature = self.results[picked]
if picked_feature.startswith("*"):
picked_feature = picked_feature.strip("*")
picked_feature = picked_feature.strip()
self.run_command(['git-flow', 'feature', 'finish', picked_feature])
class GitFlowReleaseStartCommand(GitFlowCommand):
def run(self):
self.get_window().show_input_panel('Enter Version Number:', '', self.on_done, None, None)
def on_done(self, release_name):
self.run_command(['git-flow', 'release', 'start', release_name])
class GitFlowReleaseFinishCommand(GitFlowCommand):
def run(self):
self.run_command(['git-flow', 'release'], self.release_done)
def release_done(self, result):
self.results = result.rstrip().split('\n')
self.quick_panel(self.results, self.panel_done,
sublime.MONOSPACE_FONT)
def panel_done(self, picked):
if 0 > picked < len(self.results):
return
picked_release = self.results[picked]
if picked_release.startswith("*"):
picked_release = picked_release.strip("*")
picked_release = picked_release.strip()
self.run_command(['git-flow', 'release', 'finish', picked_release])
class GitFlowHotfixStartCommand(GitFlowCommand):
def run(self):
self.get_window().show_input_panel('Enter hotfix name:', '', self.on_done, None, None)
def on_done(self, hotfix_name):
self.run_command(['git-flow', 'hotfix', 'start', hotfix_name])
class GitFlowHotfixFinishCommand(GitFlowCommand):
def run(self):
self.run_command(['git-flow', 'hotfix'], self.hotfix_done)
def hotfix_done(self, result):
self.results = result.rstrip().split('\n')
self.quick_panel(self.results, self.panel_done,
sublime.MONOSPACE_FONT)
def panel_done(self, picked):
if 0 > picked < len(self.results):
return
picked_hotfix = self.results[picked]
if picked_hotfix.startswith("*"):
picked_hotfix = picked_hotfix.strip("*")
picked_hotfix = picked_hotfix.strip()
self.run_command(['git-flow', 'hotfix', 'finish', picked_hotfix])
| mit | 3,764,561,798,035,181,600 | 33.455556 | 97 | 0.633022 | false | 3.639671 | false | false | false |
orotib/RetroBookDownloader | retro.py | 1 | 2684 | #!/usr/bin/env python
#!encoding: utf-8
"""
Retro Book Downloader
A simple python script for downloading retro books from pcvilag.muskatli.hu.
All books on this website: http://pcvilag.muskatli.hu/
Written by Tibor Oros, 2015 (oros.tibor0@gmail.com)
Recommended version: Python 2.7
"""
import os
import shutil
import sys
import urllib
import urllib2
from bs4 import BeautifulSoup
FOLDERNAME = 'temp'
def makeDir(name):
if not(os.path.exists(name)):
os.mkdir(name)
os.chdir(name)
else:
shutil.rmtree(name)
os.mkdir(name)
os.chdir(name)
def getProjectName(url):
return url.split('/')[5]
def makeLinkURL(mainUrl, projectName):
return mainUrl + projectName + '/link.php'
def makeDownloadURL(mainUrl, projectName):
return mainUrl + projectName + '/'
def getLinkName(link):
return link.get('href').split('=')[1]
def openURL(linkUrl):
tmp = urllib2.urlopen(linkUrl)
soup = BeautifulSoup(tmp)
return soup.find_all('a')
def downloadImages(links, downloadURL, errorItem):
for link in links:
if len(link.get('href').split('=')) == 2:
try:
pName = getLinkName(link)
urllib.urlretrieve(downloadURL + pName, pName)
print 'Downloaded image: ' + pName
except IOError:
print 'Image does not exist: ' + pName
errorItem.append(pName)
except:
print 'Unknown error'
def deleteDir(name):
os.chdir('..')
shutil.rmtree(name)
def errorTest(ei):
if len(ei) != 0:
print '--- Missing image(s) ---'
for i in ei:
print i
def main():
mainURL = 'http://pcvilag.muskatli.hu/irodalom/cbooks/'
URL = raw_input('Book URL: ')
try:
projectName = getProjectName(URL)
linkURL = makeLinkURL(mainURL, projectName)
downloadURL = makeDownloadURL(mainURL, projectName)
links = openURL(linkURL)
except (urllib2.URLError, IndexError):
print '*** Wrong URL ***'
print 'Example: http://pcvilag.muskatli.hu/irodalom/cbooks/njk64/njk64.html'
sys.exit()
makeDir(FOLDERNAME)
errorItem = []
print 'Program downloading...'
downloadImages(links, downloadURL, errorItem)
print 'Downloading complete.'
print 'Program converting...'
os.system('convert *.jpg ../' + projectName + '.pdf')
print 'Converting complete.'
deleteDir(FOLDERNAME)
errorTest(errorItem)
raw_input('Press enter to exit.')
######################################################
if __name__ == '__main__':
main()
| gpl-2.0 | 2,992,041,773,091,996,700 | 23.4 | 84 | 0.59687 | false | 3.593039 | false | false | false |
hazelcast/hazelcast-python-client | hazelcast/protocol/codec/transactional_map_put_if_absent_codec.py | 1 | 1192 | from hazelcast.serialization.bits import *
from hazelcast.protocol.builtin import FixSizedTypesCodec
from hazelcast.protocol.client_message import OutboundMessage, REQUEST_HEADER_SIZE, create_initial_buffer
from hazelcast.protocol.builtin import StringCodec
from hazelcast.protocol.builtin import DataCodec
from hazelcast.protocol.builtin import CodecUtil
# hex: 0x0E0800
_REQUEST_MESSAGE_TYPE = 919552
# hex: 0x0E0801
_RESPONSE_MESSAGE_TYPE = 919553
_REQUEST_TXN_ID_OFFSET = REQUEST_HEADER_SIZE
_REQUEST_THREAD_ID_OFFSET = _REQUEST_TXN_ID_OFFSET + UUID_SIZE_IN_BYTES
_REQUEST_INITIAL_FRAME_SIZE = _REQUEST_THREAD_ID_OFFSET + LONG_SIZE_IN_BYTES
def encode_request(name, txn_id, thread_id, key, value):
buf = create_initial_buffer(_REQUEST_INITIAL_FRAME_SIZE, _REQUEST_MESSAGE_TYPE)
FixSizedTypesCodec.encode_uuid(buf, _REQUEST_TXN_ID_OFFSET, txn_id)
FixSizedTypesCodec.encode_long(buf, _REQUEST_THREAD_ID_OFFSET, thread_id)
StringCodec.encode(buf, name)
DataCodec.encode(buf, key)
DataCodec.encode(buf, value, True)
return OutboundMessage(buf, False)
def decode_response(msg):
msg.next_frame()
return CodecUtil.decode_nullable(msg, DataCodec.decode)
| apache-2.0 | -2,616,293,637,833,377,300 | 38.733333 | 105 | 0.777685 | false | 3.145119 | false | false | false |
cryvate/project-euler | project_euler/framework/solve.py | 1 | 4943 | #!/usr/bin/env python
"""Solver for Project Euler problems.
Usage:
solve <problem_number>
solve (-h | --help)
Options:
-h --help Show this screen.
"""
from importlib import import_module
from os.path import join, split
import time
import warnings
from typing import Any, Callable, Tuple
from docopt import docopt
from termcolor import colored # noqa: F401
import yaml
import project_euler.solutions # noqa: F401
from project_euler.solutions.problems import slow_numbers as slow_problems
spec = '{:4.2f}'
MINUTE_RULE = 60
SLOW = 10
SOLVE_MSG = ('{colored("[PE-" + str(problem_number) +"]", status_colour)} '
'{colored(str(answer), "green") if answer_correct else colored(str(answer) + " != " + str(reference_answer), "red")} ' # noqa: E501
'{colored("[" + spec.format(spent) + "s" + "!" * (minute_violated + slow_violated) + "]", "green" if spent <= slow else ("yellow" if spent <= minute_rule else "red"))}') # noqa: E501
SOLVE_MSG_E = ''
class SolveException(Exception):
pass
class ProblemMalformed(SolveException):
pass
class SolutionWrong(SolveException):
pass
class AnswerVerificationFailed(SolutionWrong):
pass
class OneMinuteRuleViolation(SolutionWrong):
pass
def solve_problem(problem_number: int,
solve: Callable[[], Any]=None,
minute_rule: float=None,
slow: float=None) -> Tuple[str, float]:
if not minute_rule:
minute_rule = MINUTE_RULE
if not slow:
slow = SLOW
file_name = f'problem_{problem_number}.yaml'
file_path = join(join(split(__file__)[0], '..', 'problems', file_name))
with open(file_path) as f:
parameters = yaml.load(f)
parameters['title']
parameters['description']
reference_answer = parameters['answer_b64'].decode()
parameters['strategy']
if not solve:
problem_module = import_module(f'.problem_{problem_number}',
package='project_euler.solutions')
solve = problem_module.solve
reference_answer = parameters['answer_b64'].decode()
start = time.time()
try:
answer = str(solve())
# often more natural to return int
except Exception as e:
answer = str(type(e))[8:-2] + "_occured"
spent = time.time() - start
answer_correct = answer == reference_answer
minute_violated = spent > minute_rule
slow_violated = spent > slow
status_colour_time = 'green' if slow_violated else ( # NOQA: F841
'yellow' if minute_violated else 'red')
status_colour = 'green' if answer_correct and not slow_violated else ( # noqa: F841,E501
'yellow' if answer_correct and not minute_violated else 'red')
print(eval('f' + repr(SOLVE_MSG)))
raise
spent = time.time() - start
answer_correct = answer == reference_answer
minute_violated = spent > minute_rule
slow_violated = spent > slow
status_colour_time = 'green' if slow_violated else ( # NOQA: F841
'yellow' if minute_violated else 'red')
status_colour = 'green' if answer_correct and not slow_violated else ( # noqa: F841,E501
'yellow' if answer_correct and not minute_violated else 'red')
print(eval('f' + repr(SOLVE_MSG)))
if not answer_correct:
raise AnswerVerificationFailed(
f'In problem {problem_number} the calculated answer is '
f'{answer} ({spec.format(spent)}s), the reference answer is '
f'{reference_answer}.')
if minute_violated:
if problem_number in slow_problems:
slower_time = slow_problems[problem_number]
if spent > slower_time:
raise OneMinuteRuleViolation(
f'Problem {problem_number} took {spec.format(spent)}s,'
f' which is more than the {slower_time}s it is '
f'allowed to take.')
else:
warnings.warn(
f'Problem {problem_number} took {spec.format(spent)}s,'
f' which is less than the {slower_time}s it is allowed'
f' to take, but more than {minute_rule}s.',
UserWarning)
else:
raise OneMinuteRuleViolation(
f'Problem {problem_number} took {spec.format(spent)}s, '
f'which is more than a minute!')
elif slow_violated:
warnings.warn(
f'Problem {problem_number} took {spec.format(spent)}s,'
f' which is more than {slow}s.', UserWarning)
return answer, spent
if __name__ == '__main__':
arguments = docopt(__doc__)
problem_number = arguments['<problem_number>']
try:
with warnings.catch_warnings():
warnings.simplefilter("ignore")
solve_problem(problem_number)
except SolveException:
pass
| mit | -2,727,328,987,806,650,000 | 29.140244 | 196 | 0.599029 | false | 3.816988 | false | false | false |
Mercy-Nekesa/sokoapp | sokoapp/coupons/migrations/0001_initial.py | 2 | 5104 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Coupon'
db.create_table('coupons_coupon', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('value', self.gf('django.db.models.fields.IntegerField')()),
('code', self.gf('django.db.models.fields.CharField')(unique=True, max_length=30, blank=True)),
('type', self.gf('django.db.models.fields.CharField')(max_length=20)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, blank=True)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('redeemed_at', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
))
db.send_create_signal('coupons', ['Coupon'])
def backwards(self, orm):
# Deleting model 'Coupon'
db.delete_table('coupons_coupon')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'coupons.coupon': {
'Meta': {'ordering': "['created_at']", 'object_name': 'Coupon'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'redeemed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.IntegerField', [], {})
}
}
complete_apps = ['coupons'] | mit | 5,946,749,823,817,242,000 | 64.448718 | 182 | 0.559757 | false | 3.65616 | false | false | false |
liosha2007/temporary-groupdocs-python3-sdk | groupdocs/models/JobOutputDocument.py | 1 | 1667 | #!/usr/bin/env python
"""
Copyright 2012 GroupDocs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class JobOutputDocument:
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually."""
def __init__(self):
self.swaggerTypes = {
'error': 'str',
'name': 'str',
'version': 'int',
'size': 'int',
'type': 'str',
'type_str': 'str',
'file_type_str': 'str',
'document_path': 'str',
'access': 'str',
'url': 'str',
'file_type': 'str',
'id': 'float',
'guid': 'str'
}
self.error = None # str
self.name = None # str
self.version = None # int
self.size = None # int
self.type = None # str
self.type_str = None # str
self.file_type_str = None # str
self.document_path = None # str
self.access = None # str
self.url = None # str
self.file_type = None # str
self.id = None # float
self.guid = None # str
| apache-2.0 | -4,815,947,096,797,974,000 | 28.767857 | 77 | 0.556089 | false | 4.209596 | false | false | false |
brew/fruitynutters | webapps/fruitynutters/cart/migrations/0001_initial.py | 1 | 3090 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-02-22 11:39
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('catalogue', '__first__'),
]
operations = [
migrations.CreateModel(
name='Cart',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date_created', models.DateField(auto_now_add=True)),
('date_updated', models.DateField(auto_now=True)),
('cart_comment', models.TextField(blank=True, default=b'', null=True)),
('cart_username', models.CharField(blank=True, default=b'', max_length=60, null=True)),
('cart_useremail', models.CharField(blank=True, default=b'', max_length=60, null=True)),
('cart_userphone', models.CharField(blank=True, default=b'', max_length=60, null=True)),
],
options={
'verbose_name': 'Shopping Cart',
'verbose_name_plural': 'Shopping Carts',
},
),
migrations.CreateModel(
name='CartItem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('quantity', models.IntegerField(verbose_name=b'Quantity')),
('cart', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cart.Cart', verbose_name=b'Cart')),
('cart_bundle', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='bundle_owner', to='cart.Cart')),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='catalogue.Item', verbose_name=b'Catalogue Item')),
],
options={
'verbose_name': 'Cart Item',
'verbose_name_plural': 'Cart Items',
},
),
migrations.CreateModel(
name='CartVirtualShopItem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=140)),
('quantity', models.IntegerField(verbose_name=b'Quantity')),
('cart', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cart.Cart', verbose_name=b'Cart')),
],
),
migrations.CreateModel(
name='CartWriteinItem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=140)),
('code', models.CharField(max_length=20)),
('cart', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cart.Cart', verbose_name=b'Cart')),
],
),
]
| mit | -7,955,866,565,930,108,000 | 45.818182 | 164 | 0.567961 | false | 4.098143 | false | false | false |
ViderumGlobal/ckanext-orgdashboards | ckanext/orgdashboards/logic/action.py | 1 | 2689 | import logging
import json
from pylons import config
from ckan import logic
import ckan.plugins as p
import ckan.lib.helpers as h
import ckan.logic.action.update as update_core
import ckan.logic.action.get as get_core
from ckanext.orgdashboards.helpers import (_get_action,
orgdashboards_get_geojson_properties)
log = logging.getLogger(__name__)
@p.toolkit.side_effect_free
def orgdashboards_show_datasets(context, data_dict):
    """Return the list of datasets that belong to an organization.

    Delegates to the core 'organization_show' action with
    'include_datasets' forced on, then strips everything except the
    'packages' list from the result (empty list when absent).
    """
    params = data_dict.copy()
    params['include_datasets'] = True
    org = _get_action('organization_show', context.copy(), params)
    return org.pop('packages', [])
@p.toolkit.side_effect_free
def orgdashboards_dataset_show_resources(context, data_dict):
    """Return a dataset's resource list (empty list when it has none)."""
    pkg = _get_action('package_show', context.copy(), data_dict)
    return pkg.pop('resources', [])
@p.toolkit.side_effect_free
def orgdashboards_resource_show_resource_views(context, data_dict):
    """Return the resource's views restricted to data_dict['view_type']."""
    wanted = data_dict['view_type']
    views = _get_action('resource_view_list', context.copy(), data_dict)
    return filter(lambda view: view['view_type'] == wanted, views)
@p.toolkit.side_effect_free
def orgdashboards_resource_show_map_properties(context, data_dict):
    """Return the GeoJSON property names for the resource in data_dict['id']."""
    # Thin wrapper; 'id' may be absent, in which case None is passed through.
    return orgdashboards_get_geojson_properties(data_dict.get('id'))
@p.toolkit.side_effect_free
def orgdashboards_get_map_main_property(context, data_dict):
    """Return the organization's 'orgdashboards_map_main_property' extra.

    Returns None when the organization has no such property set.
    """
    dd = data_dict.copy()
    dd.update({'include_datasets': True})
    data = _get_action('organization_show', context.copy(), dd)
    # dict.get replaces the original membership test + manual None branch.
    return data.get('orgdashboards_map_main_property')
def organization_update(context, data_dict):
    """Wrap core organization_update, folding per-chart form fields into a
    single 'orgdashboards_charts' JSON list so any number of charts can be
    stored on the organization.

    Recognised incoming keys:
        orgdashboards_chart_<n>            -> resource view id
        orgdashboards_chart_<n>_subheader  -> subheader text for chart <n>
    """
    charts = []
    for key, value in data_dict.items():
        if (key.startswith('orgdashboards_chart_') and
                not key.endswith('subheader')):
            # 'chart_id' avoids shadowing the builtin 'id' (original code).
            chart_id = key.split('_')[-1]
            charts.append({
                'order': int(chart_id),
                'resource_view_id': value,
                'subheader': data_dict.get(
                    'orgdashboards_chart_{}_subheader'.format(chart_id)
                ),
            })
    if charts:
        data_dict['orgdashboards_charts'] = json.dumps(charts)
    return update_core.organization_update(context, data_dict)
@p.toolkit.side_effect_free
def organization_show(context, data_dict):
    """Wrap core organization_show, decoding the 'orgdashboards_charts'
    JSON blob back into a list ordered by each chart's 'order' key."""
    data = get_core.organization_show(context, data_dict)
    charts = data.get('orgdashboards_charts')
    if charts:
        data['orgdashboards_charts'] = json.loads(charts)
        # BUG FIX: the original called sorted(...) and discarded the return
        # value, so the charts were never actually ordered. Sort in place.
        data['orgdashboards_charts'].sort(key=lambda chart: chart['order'])
    return data
| agpl-3.0 | 7,214,593,898,849,147,000 | 28.877778 | 80 | 0.662328 | false | 3.561589 | false | false | false |
kgullikson88/DSD-Paper | plotters/make_average_detrate_plot.py | 1 | 4156 | """
This script goes through the marginalized detection rate files,
and gets the average detection rate as a function of temperature.
"""
from __future__ import print_function, division
import pandas as pd
import numpy as np
import sys
import os
import datetime
import matplotlib.pyplot as plt
import seaborn as sns
import plottools
sns.set_context('paper', font_scale=2.0)
sns.set_style('white')
sns.set_style('ticks')
import get_parameters
# Some constants
SAMPLE_HIP = [1366, 3300, 12719, 13165, 15338, 17563, 22840, 22958, 24902,
26063, 26563, 28691, 33372, 44127, 58590, 65477, 76267, 77516,
77858, 79199, 79404, 81641, 84606, 85385, 88290, 89156, 91118,
92027, 92728, 98055, 100221, 106786, 113788, 116247, 116611]
SAMPLE_STARS = ['HIP {}'.format(hip) for hip in SAMPLE_HIP]
BASE_DIR = '{}/School/Research'.format(os.environ['HOME'])
INSTRUMENT_DIRS = dict(TS23='{}/McDonaldData/'.format(BASE_DIR),
HRS='{}/HET_data/'.format(BASE_DIR),
CHIRON='{}/CHIRON_data/'.format(BASE_DIR),
IGRINS='{}/IGRINS_data/'.format(BASE_DIR))
def get_undetected_stars(star_list=SAMPLE_STARS):
    """
    Return the sample stars with no detected companion.

    A star counts as undetected when its 'Temperature' entry is NaN in the
    full sample table. The result keeps only the identifier, instrument
    and parsed observation date, restricted to stars in *star_list*.
    """
    sample = get_parameters.read_full_sample()
    sample['Parsed_date'] = sample.Date.map(get_parameters.convert_dates)
    no_detection = sample.loc[sample.Temperature.isnull()]
    in_sample = no_detection.loc[no_detection.identifier.isin(star_list)]
    return in_sample[['identifier', 'Instrument', 'Parsed_date']].copy()
def decrement_day(date):
    """Return the ISO date string one day before *date* ('YYYY-MM-DD')."""
    year, month, day = (int(part) for part in date.split('-'))
    previous = datetime.datetime(year, month, day) - datetime.timedelta(days=1)
    return previous.date().isoformat()
def get_detection_rate(instrument, starname, date):
    """
    Read the detection rate vs. temperature table for one observation.

    Looks for '<dir><star>_<date>_simple.csv'; when that is missing, it
    retries with the date decremented by one day (presumably to catch
    observations logged on the other side of midnight - TODO confirm).
    Returns an empty frame when neither file exists. A 'star' column is
    added so frames from many stars can be concatenated.
    """
    directory = INSTRUMENT_DIRS[instrument]
    compact_name = starname.replace(' ', '')
    fname = '{}{}_{}_simple.csv'.format(directory, compact_name, date)
    try:
        df = pd.read_csv(fname)
    except IOError:
        fname = '{}{}_{}_simple.csv'.format(directory, compact_name, decrement_day(date))
        try:
            df = pd.read_csv(fname)
        except IOError:
            print('File {} does not exist! Skipping!'.format(fname))
            df = pd.DataFrame(columns=['temperature', 'detection rate', 'mean vsini'])
    df['star'] = [starname] * len(df)
    return df
def get_stats(df):
    """Collapse one temperature group into its median and 16/84 percentiles.

    Expects columns 'temperature' (constant within the group) and
    'detection rate'; returns a Series with keys temperature, middle,
    low_pct and high_pct.
    """
    temperature = df.temperature.values[0]
    p16, p50, p84 = np.percentile(df['detection rate'], [16, 50, 84])
    return pd.Series({'temperature': temperature,
                      'middle': p50,
                      'low_pct': p16,
                      'high_pct': p84})
def make_plot(stats_df):
    """Plot the median detection rate vs. temperature with its spread.

    *stats_df* is the output of get_stats applied per temperature group:
    indexed by temperature, with columns 'temperature', 'middle',
    'low_pct' and 'high_pct'. Returns the matplotlib Axes.
    """
    fig, ax = plt.subplots()
    fig.subplots_adjust(left=0.15, bottom=0.18, right=0.95, top=0.85)
    ax.plot(stats_df.temperature, stats_df.middle, 'r--', lw=2, label='Median')
    # NOTE(review): x here is the frame index while the line above uses the
    # 'temperature' column - these coincide because the frame is grouped by
    # temperature upstream; confirm before reusing this function elsewhere.
    ax.fill_between(stats_df.index, stats_df.high_pct, stats_df.low_pct, alpha=0.4, color='blue')
    # Zero-size rectangle so the shaded band gets a legend entry.
    p = plt.Rectangle((0, 0), 0, 0, color='blue', alpha=0.4, label='16th-84th Percentile')
    ax.add_patch(p)
    # Secondary axis labelled with spectral types instead of Kelvin.
    plottools.add_spt_axis(ax, spt_values=('M5', 'M0', 'K5', 'K0', 'G5', 'G0', 'F5'))
    leg = ax.legend(loc=4, fancybox=True)
    ax.set_xlabel('Temperature (K)')
    ax.set_ylabel('Detection Rate')
    ax.set_xlim((3000, 6550))
    ax.set_ylim((0.0, 1.05))
    return ax
if __name__ == '__main__':
    sample = get_undetected_stars()
    print(sample)
    # One detection-rate table per undetected star, concatenated long-form.
    frames = [get_detection_rate(instrument, star, date)
              for instrument, star, date in zip(sample.Instrument,
                                                sample.identifier,
                                                sample.Parsed_date)]
    summary = pd.concat(frames)
    # Get statistics on the summary, averaging over the stars
    stats = summary[['temperature', 'detection rate']].groupby('temperature').apply(get_stats)
    make_plot(stats)
    plt.savefig('../Figures/DetectionRate.pdf')
    plt.show()
tanderegg/mesos-spawner | mesos_spawner/executor.py | 1 | 1027 | #!/usr/bin/env python2.7
from __future__ import print_function
import sys
import time
from threading import Thread
from pymesos import MesosExecutorDriver, Executor, decode_data
class JupyterHubExecutor(Executor):
    """Minimal Mesos executor: acknowledges a task, prints its payload to
    stderr, simulates 30 seconds of work, then reports completion.
    """
    def launchTask(self, driver, task):
        """Run the task on a background thread so the driver loop stays free."""
        def _report(state):
            # Status update for this task, stamped with the current time.
            driver.sendStatusUpdate({
                'task_id': {'value': task['task_id']['value']},
                'state': state,
                'timestamp': time.time(),
            })

        def _work(task):
            _report('TASK_RUNNING')
            print(decode_data(task['data']), file=sys.stderr)
            time.sleep(30)
            _report('TASK_FINISHED')

        worker = Thread(target=_work, args=(task,))
        worker.start()
| mit | -4,009,985,223,627,947,000 | 26.026316 | 62 | 0.497566 | false | 4.261411 | false | false | false |
Petraea/jsonbot | jsb/lib/persist.py | 1 | 11970 | # jsb/persist.py
#
#
"""
allow data to be written to disk or BigTable in JSON format. creating
the persisted object restores data.
"""
## jsb imports
from jsb.utils.trace import whichmodule, calledfrom, callstack, where
from jsb.utils.lazydict import LazyDict
from jsb.utils.exception import handle_exception
from jsb.utils.name import stripname
from jsb.utils.locking import lockdec
from jsb.utils.timeutils import elapsedstring
from jsb.lib.callbacks import callbacks
from jsb.lib.errors import MemcachedCounterError, JSONParseError, WrongFileName
from datadir import getdatadir
## simplejson imports
from jsb.imports import getjson
json = getjson()
## basic imports
from collections import deque
import thread
import logging
import os
import os.path
import types
import copy
import sys
import time
## defines
cpy = copy.deepcopy
## locks
persistlock = thread.allocate_lock()
persistlocked = lockdec(persistlock)
## global list to keeptrack of what persist objects need to be saved
needsaving = deque()
def cleanup(bot=None, event=None):
    """Retry saving every Persist object that failed to save earlier.

    Objects that save successfully are removed from the module-level
    'needsaving' queue; the (possibly shrunken) queue is returned.
    The bot/event parameters only exist to fit the callback signature.
    """
    global needsaving
    saved = []
    for persisted in needsaving:
        try:
            persisted.dosave()
            saved.append(persisted)
            logging.warn("saved on retry - %s" % persisted.fn)
        except (OSError, IOError) as ex:
            logging.error("failed to save %s - %s" % (persisted, str(ex)))
    for persisted in saved:
        try:
            needsaving.remove(persisted)
        except ValueError:
            pass
    return needsaving
# Probe for a running memcached instance; 'got' records whether one is
# available. When it is, Persist objects mirror their JSON through
# memcached in addition to the in-process cache.
got = False
from jsb.memcached import getmc
mc = getmc()
if mc:
    status = mc.get_stats()
    if status:
        # get_stats() returns [(server, {stat_name: value, ...}), ...]
        logging.warn("memcached uptime is %s" % elapsedstring(status[0][1]['uptime']))
        got = True
else: logging.debug("no memcached found - using own cache")
# in-process cache helpers, used with or without memcached
from cache import get, set, delete
import fcntl  # used by Persist.dosave for advisory file locking
## classes
class Persist(object):
""" persist data attribute to JSON file. """
def __init__(self, filename, default=None, init=True, postfix=None, needexist=False):
""" Persist constructor """
if postfix: self.fn = str(filename.strip()) + str("-%s" % postfix)
else: self.fn = str(filename.strip())
if needexist and not os.path.exists(self.fn): raise WrongFileName(self.fn)
self.lock = thread.allocate_lock() # lock used when saving)
self.data = LazyDict(default=default) # attribute to hold the data
self.setlogname()
self.countername = self.fn + "_" + "counter"
if got:
count = mc.get(self.countername)
try:
self.mcounter = self.counter = int(count)
except (ValueError, TypeError):
self.mcounter = self.counter = mc.set(self.countername, "1") or 0
else:
self.mcounter = self.counter = 0
self.ssize = 0
self.jsontxt = ""
self.dontsave = False
if init:
self.init(default)
if default == None: default = LazyDict()
self.setlogname()
def setlogname(self):
try:
res = []
target = getdatadir().split(os.sep)
if not target[-1]: target = target[-2]
else: target = target[-1]
for i in self.fn.split(os.sep)[::-1]:
if target in i: break
if i.endswith(os.sep): i = i[:-1]
res.append(i)
self.logname = os.sep.join(res[::-1])
if not self.logname: self.logname = self.fn
except: handle_exception() ; self.logname = self.fn
def size(self):
return "%s (%s)" % (len(self.data), len(self.jsontxt))
def init(self, default={}, filename=None):
""" initialize the data. """
gotcache = False
cachetype = "cache"
try:
logging.debug("using name %s" % self.fn)
a = get(self.fn)
if a: self.data = a
else: self.data = None
if self.data != None:
logging.debug("got data from local cache")
return self
if got: self.jsontxt = mc.get(self.fn) ; cachetype = "cache"
if not self.jsontxt:
datafile = open(self.fn, 'r')
self.jsontxt = datafile.read()
datafile.close()
self.ssize = len(self.jsontxt)
cachetype = "file"
if got: mc.set(self.fn, self.jsontxt)
except IOError, ex:
if not 'No such file' in str(ex):
logging.error('failed to read %s: %s' % (self.fn, str(ex)))
raise
else:
logging.debug("%s doesn't exist yet" % self.fn)
self.jsontxt = json.dumps(default)
try:
if self.jsontxt:
logging.debug(u"loading: %s" % type(self.jsontxt))
try: self.data = json.loads(str(self.jsontxt))
except Exception, ex: logging.error("couldn't parse %s in the %s file" % (self.jsontxt, self.fn)) ; self.data = None ; self.dontsave = True
if not self.data: self.data = LazyDict()
elif type(self.data) == types.DictType:
logging.debug("converting dict to LazyDict")
d = LazyDict()
d.update(self.data)
self.data = d
set(self.fn, self.data)
logging.debug("loaded %s - %s" % (self.logname, cachetype))
except Exception, ex:
logging.error('ERROR: %s' % self.fn)
raise
def upgrade(self, filename):
self.init(self.data, filename=filename)
self.save(filename)
def get(self):
logging.debug("getting %s from local cache" % self.fn)
a = get(self.fn)
logging.debug("got %s from local cache" % type(a))
return a
def sync(self):
logging.debug("syncing %s" % self.fn)
if got: mc.set(self.fn, json.dumps(self.data))
set(self.fn, self.data)
return self
def save(self):
cleanup()
global needsaving
try: self.dosave()
except (IOError, OSError):
self.sync()
if self not in needsaving: needsaving.append(self)
@persistlocked
def dosave(self):
""" persist data attribute. """
try:
if self.dontsave: logging.error("dontsave is set on %s - not saving" % self.fn) ; return
fn = self.fn
if got: self.mcounter = int(mc.incr(self.countername))
if got and (self.mcounter - self.counter) > 1:
tmp = json.loads(mc.get(fn))
if tmp:
try: tmp.update(self.data) ; self.data = LazyDict(tmp) ; logging.warn("updated %s" % fn)
except AttributeError: pass
self.counter = self.mcounter
d = []
if fn.startswith(os.sep): d = [os.sep,]
for p in fn.split(os.sep)[:-1]:
if not p: continue
d.append(p)
pp = os.sep.join(d)
if not os.path.isdir(pp):
logging.warn("creating %s dir" % pp)
os.mkdir(pp)
tmp = fn + '.tmp' # tmp file to save to
datafile = open(tmp, 'w')
fcntl.flock(datafile, fcntl.LOCK_EX | fcntl.LOCK_NB)
json.dump(self.data, datafile, indent=True)
fcntl.flock(datafile, fcntl.LOCK_UN)
datafile.close()
try: os.rename(tmp, fn)
except (IOError, OSError):
os.remove(fn)
os.rename(tmp, fn)
jsontxt = json.dumps(self.data)
logging.debug("setting cache %s - %s" % (fn, jsontxt))
self.jsontxt = jsontxt
set(fn, self.data)
if got: mc.set(fn, jsontxt)
if 'sleeptime' in self.fn: logging.info('%s saved' % self.logname)
else: logging.warn('%s saved' % self.logname)
except IOError, ex: logging.error("not saving %s: %s" % (self.fn, str(ex))) ; raise
except: raise
finally: pass
## findfilenames function
def findfilenames(target, filter=[], skip=[]):
    """Recursively collect paths under the *target* directory.

    A path is included when every string in *filter* occurs in it
    (case-insensitive) and no string in *skip* occurs in the entry name.
    Returns [] when *target* is not a directory. The mutable default
    lists are kept for backward compatibility and are never mutated.
    """
    logging.debug("finding files in %s - filter: %s - skip: %s" % (target, filter, skip))
    res = []
    if not os.path.isdir(target): return res
    if not target.endswith(os.sep): target += os.sep
    for f in os.listdir(target):
        # BUG FIX: the original "for s in skip: if s in f: continue" only
        # continued the inner loop, so the skip list was silently ignored.
        if any(s in f for s in skip): continue
        fname = target + f
        if os.path.isdir(fname): res.extend(findfilenames(fname, filter, skip))
        # keep the entry itself (file or directory) when all filters match
        if all(fil.lower() in fname.lower() for fil in filter): res.append(fname)
    return res
def findnames(target, filter=[], skip=[]):
    """Like findfilenames, but return bare file names instead of full paths."""
    return [fname.split(os.sep)[-1] for fname in findfilenames(target, filter, skip)]
class PlugPersist(Persist):
    """ persist plug related data. data is stored in jsondata/plugs/{plugname}/{filename}. """
    def __init__(self, filename, default={}, *args, **kwargs):
        # Derive the owning plugin's name from the caller's stack frame so
        # each plugin transparently gets its own subdirectory. This relies
        # on being called directly from the plugin module - do not wrap it.
        plugname = calledfrom(sys._getframe())
        Persist.__init__(self, getdatadir() + os.sep + 'plugs' + os.sep + stripname(plugname) + os.sep + stripname(filename), default=default, *args, **kwargs)
class GlobalPersist(Persist):
    """ persist global (non plugin-specific) data. data is stored in jsondata/globals/{filename}. """
    def __init__(self, filename, default={}, *args, **kwargs):
        # a bare filename is required here, so fail fast when it is missing
        if not filename: raise Exception("filename not set in GlobalPersist")
        logging.warn("filename is %s" % filename)
        Persist.__init__(self, getdatadir() + os.sep + 'globals' + os.sep + stripname(filename), default=default, *args, **kwargs)
## PersistCollection class
class PersistCollection(object):
    """ maintain a collection of Persist objects living under one directory. """
    def __init__(self, path, *args, **kwargs):
        assert path
        self.path = path
        d = [os.sep, ]
        logging.warn("path is %s" % self.path)
        # create every directory component of the target path in turn
        for p in path.split(os.sep):
            if not p: continue
            d.append(p)
            pp = os.sep.join(d)
            try:
                os.mkdir(pp)
                logging.warn("creating %s dir" % pp)
            except OSError, ex:
                # Errno 13 (permission), 2 (missing parent) and 17 (already
                # exists) are expected here and skipped without logging.
                if 'Errno 13' in str(ex) or 'Errno 2' in str(ex) or "Errno 17" in str(ex): continue
                logging.warn("can't make %s - %s" % (pp,str(ex))) ; continue
    def filenames(self, filter=[], path=None, skip=[], result=[]):
        # full paths of matching files under path (defaults to self.path).
        # NOTE(review): the 'result' parameter is unused (same in names()).
        target = path or self.path
        res = findfilenames(target, filter, skip)
        logging.warn("filenames are %s" % str(res))
        return res
    def names(self, filter=[], path=None, skip=[], result=[]):
        # bare file names (no directory part) of matching files
        target = path or self.path
        res = findnames(target, filter, skip)
        return res
    def search(self, field, target):
        # return every Persist object whose data.<field> contains 'target'
        res = []
        for obj in self.objects().values():
            try: item = getattr(obj.data, field)
            except AttributeError: handle_exception() ; continue
            if not item: continue
            if target in item: res.append(obj)
        return res
    def objects(self, filter=[], path=None):
        """ map filename -> Persist object for every matching file. """
        if type(filter) != types.ListType: filter = [filter, ]
        res = {}
        target = path or self.path
        for f in self.filenames(filter, target):
            res[f] = Persist(f)
        return res
## PlugPersistCollection class
class PlugPersistCollection(PersistCollection):
    """ collection of Persist objects in the calling plugin's data directory. """
    def __init__(self):
        # Determine the calling plugin via frame introspection; as with
        # PlugPersist this must be invoked directly from the plugin module.
        plugname = calledfrom(sys._getframe())
        logging.warn("plugin is %s" % plugname)
        self.path = getdatadir() + os.sep + 'plugs' + os.sep + stripname(plugname) + os.sep
        PersistCollection.__init__(self, self.path)
## GlobalPersistCollection class
class GlobalPersistCollection(PersistCollection):
    """ collection of Persist objects stored in the global data directory. """
    def __init__(self):
        self.path = getdatadir() + os.sep + 'globals'
        # BUG FIX: the original called the undefined name
        # 'GlobalCollection(self, self.path)', which raised NameError on
        # every instantiation; delegate to the base initializer instead.
        PersistCollection.__init__(self, self.path)
callbacks.add("TICK60", cleanup)
| mit | -6,698,152,806,198,385,000 | 32.909348 | 159 | 0.568421 | false | 3.739456 | false | false | false |
jithinbp/vLabtool-v0 | v0/experiment.py | 2 | 18475 | # Set the QT API to PyQt4
import os
import pkg_resources
os.environ['QT_API'] = 'pyqt'
import sip
sip.setapi("QString", 2)
sip.setapi("QVariant", 2)
from PyQt4 import QtGui,QtCore
import sys
import functools,random
from templates import template_exp
import time,sys
import custom_widgets as Widgets
import numpy as np
import sys
class ConvenienceClass():
"""
This class contains methods that simplify setting up and running
an experiment.
The :func:`arbitFit` method accepts two arrays, the fitting function,
and a keyword argument 'guess' that is an array containing
guess values for the various fiting parameters.
Guess values can be obtained using the :func:`getGuessValues` based on
a keyword argument 'func' which as of this moment can be either 'sine'
or 'damped sine'
"""
timers=[]
def __init__(self):
print 'initializing convenience class'
try:
import scipy.optimize as optimize
import scipy.fftpack as fftpack
except ImportError:
print 'imports failed for scipy.optimize,scipy.fftpack'
self.optimize = None;self.fftpack=None
else:
self.optimize = optimize;self.fftpack=fftpack
self.timers=[]
def loopTask(self,interval,func,*args):
"""
Creates a QTimer that executes 'func' every 'interval' milliseconds
all additional arguments passed to this function are passed on as
arguments to func
Refer to the source code for experiments such as diodeIV, Bandpass filter etc.
"""
timer = QtCore.QTimer()
timerCallback = functools.partial(func,*args)
timer.timeout.connect(timerCallback)
timer.start(interval)
self.timers.append(timer)
return timer
def delayedTask(self,interval,func,*args):
"""
Creates a QTimer that executes 'func' once after 'interval' milliseconds.
all additional arguments passed to this function are passed on as
arguments to func
"""
timer = QtCore.QTimer()
timerCallback = functools.partial(func,*args)
timer.singleShot(interval,timerCallback)
self.timers.append(timer)
def random_color(self):
c=QtGui.QColor(random.randint(20,255),random.randint(20,255),random.randint(20,255))
if np.average(c.getRgb())<150:
c=self.random_color()
return c
def displayObjectContents(self,d):
"""
The contents of the dictionary 'd' are displayed in a new QWindow
"""
self.tree = self.pg.DataTreeWidget(data=d)
self.tree.show()
self.tree.setWindowTitle('Data')
self.tree.resize(600,600)
def dampedSine(self,x, amp, freq, phase,offset,damp):
"""
A damped sine wave function
"""
return offset + amp*np.exp(-damp*x)*np.sin(abs(freq)*x + phase)
def fitData(self,xReal,yReal,**args):
def mysine(x, a1, a2, a3,a4):
return a4 + a1*np.sin(abs(a2)*x + a3)
N=len(xReal)
yhat = self.fftpack.rfft(yReal)
idx = (yhat**2).argmax()
freqs = self.fftpack.rfftfreq(N, d = (xReal[1]-xReal[0])/(2*np.pi))
frequency = freqs[idx]
amplitude = (yReal.max()-yReal.min())/2.0
offset = yReal.max()-yReal.min()
frequency=args.get('frequency',1e6*abs(frequency)/(2*np.pi))*(2*np.pi)/1e6
phase=args.get('phase',0.)
guess = [amplitude, frequency, phase,offset]
try:
(amplitude, frequency, phase,offset), pcov = self.optimize.curve_fit(mysine, xReal, yReal, guess)
ph = ((phase)*180/(np.pi))
if(frequency<0):
#print 'negative frq'
return 0,0,0,0,pcov
if(amplitude<0):
#print 'AMP<0'
ph-=180
if(ph<-90):ph+=360
if(ph>360):ph-=360
freq=1e6*abs(frequency)/(2*np.pi)
amp=abs(amplitude)
if(frequency): period = 1./frequency
else: period = 0
pcov[0]*=1e6
return amp,freq,ph,offset,pcov
except:
return 0,0,0,0,[[]]
def getGuessValues(self,xReal,yReal,func='sine'):
if(func=='sine' or func=='damped sine'):
N=len(xReal)
offset = np.average(yReal)
yhat = self.fftpack.rfft(yReal-offset)
idx = (yhat**2).argmax()
freqs = self.fftpack.rfftfreq(N, d = (xReal[1]-xReal[0])/(2*np.pi))
frequency = freqs[idx]
amplitude = (yReal.max()-yReal.min())/2.0
phase=0.
if func=='sine':
return amplitude, frequency, phase,offset
if func=='damped sine':
return amplitude, frequency, phase,offset,0
def arbitFit(self,xReal,yReal,func,**args):
N=len(xReal)
guess=args.get('guess',[])
try:
results, pcov = self.optimize.curve_fit(func, xReal, yReal,guess)
pcov[0]*=1e6
return True,results,pcov
except:
return False,[],[]
class Experiment(QtGui.QMainWindow,template_exp.Ui_MainWindow,Widgets.CustomWidgets):
timers=[]
def __init__(self,**args):
self.qt_app = args.get('qt_app',QtGui.QApplication(sys.argv))
self.showSplash()
super(Experiment, self).__init__(args.get('parent',None))
self.updateSplash(10)
try:
import pyqtgraph as pg
import pyqtgraph.opengl as gl
except ImportError:
self.pg = None;self.gl=None
else:
self.pg = pg
self.gl=gl
self.updateSplash(10)
self.setupUi(self)
Widgets.CustomWidgets.__init__(self);self.updateSplash(10)
self.I = args.get('I',None)
self.graphContainer2_enabled=False
self.graphContainer1_enabled=False
self.console_enabled=False
self.output_enabled=False
self.viewBoxes=[]
self.plot_areas=[]
self.plots3D=[]
self.plots2D=[]
self.axisItems=[]
self.total_plot_areas=0
self.widgetBay = False
self.help_url = pkg_resources.resource_filename(__name__, os.path.join('helpfiles','interface.html'))
#self.additional_handle = QSplitterHandle(Qt.Horizontal,self.graph_splitter)
#self.graph_splitter.addWidget(self.additional_handle)
if(args.get('showresult',True)):
dock = QtGui.QDockWidget()
dock.setFeatures(QtGui.QDockWidget.DockWidgetMovable|QtGui.QDockWidget.DockWidgetFloatable)#|QDockWidget.DockWidgetVerticalTitleBar)
dock.setWindowTitle("Results")
self.output_text = QtGui.QTextEdit()
self.output_text.setReadOnly(True)
fr = QtGui.QFrame()
plt = QtGui.QGridLayout(fr)
plt.setMargin(0)
plt.addWidget(self.output_text)
self.output_enabled=True
sys.stdout = self.relay_to_console(self.output_text)
dock.setWidget(fr)
self.result_dock=dock
self.output_text.setStyleSheet("color: rgb(255, 255, 255);")
self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, dock)
else:
self.result_dock=False
self.output_enabled=False
self.updateSplash(10)
if(args.get('handler',False)):
self.addHandler(args.get('handler'))
while(self.progressBar.value()<100):
self.updateSplash(1)
time.sleep(0.01)
def updateSplash(self,x,txt=''):
self.progressBar.setValue(self.progressBar.value()+x)
if(len(txt)):self.splashMsg.setText(' '+txt)
self.qt_app.processEvents()
self.splash.repaint()
def showSplash(self):
import pkg_resources
splash_pix = QtGui.QPixmap(pkg_resources.resource_filename('v0.stylesheets', "splash3.png"))
self.splash = QtGui.QSplashScreen(splash_pix, QtCore.Qt.WindowStaysOnTopHint)
# adding progress bar
self.progressBar = QtGui.QProgressBar(self.splash)
self.progressBar.resize(self.splash.width(),20)
css = pkg_resources.resource_string('v0', "stylesheets/splash.css")
if css:
self.splash.setStyleSheet(css)
self.splashMsg = QtGui.QLabel(self.splash);self.splashMsg.setStyleSheet("font-weight:bold;color:purple")
self.splash.setMask(splash_pix.mask())
self.splashMsg.setText('Loading....');self.splashMsg.resize(self.progressBar.width(),20)
self.splash.show()
self.splash.repaint()
def run(self):
def __resizeHack__():
if self.result_dock:
self.result_dock.setMaximumHeight(100)
self.result_dock.setMaximumHeight(2500)
self.delayedTask(0,__resizeHack__)
self.show()
self.splash.finish(self)
self.qt_app.exec_()
def addPlotArea(self):
fr = QtGui.QFrame(self.graph_splitter)
fr.setFrameShape(QtGui.QFrame.StyledPanel)
fr.setFrameShadow(QtGui.QFrame.Raised)
fr.setMinimumHeight(250)
self.total_plot_areas+=1
fr.setObjectName("plot"+str(self.total_plot_areas))
plt = QtGui.QGridLayout(fr)
plt.setMargin(0)
self.plot_areas.append(plt)
return len(self.plot_areas)-1
def add3DPlot(self):
plot3d = self.gl.GLViewWidget()
#gx = gl.GLGridItem();gx.rotate(90, 0, 1, 0);gx.translate(-10, 0, 0);self.plot.addItem(gx)
#gy = gl.GLGridItem();gy.rotate(90, 1, 0, 0);gy.translate(0, -10, 0);self.plot.addItem(gy)
gz = self.gl.GLGridItem();#gz.translate(0, 0, -10);
plot3d.addItem(gz);
plot3d.opts['distance'] = 40
plot3d.opts['elevation'] = 5
plot3d.opts['azimuth'] = 20
pos=self.addPlotArea()
self.plot_areas[pos].addWidget(plot3d)
self.plots3D.append(plot3d)
plot3d.plotLines3D=[]
return plot3d
def add2DPlot(self):
plot=self.pg.PlotWidget()
pos=self.addPlotArea()
self.plot_areas[pos].addWidget(plot)
plot.viewBoxes=[]
self.plotLegend=plot.addLegend(offset=(-1,1))
self.plots2D.append(plot)
return plot
def add2DPlots(self,num):
for a in range(num):yield self.add2DPlot()
def add3DPlots(self,num):
for a in range(num):yield self.add3DPlot()
def addAxis(self,plot,**args):
p3 = self.pg.ViewBox()
ax3 = self.pg.AxisItem('right')
plot.plotItem.layout.addItem(ax3, 2, 3+len(self.axisItems))
plot.plotItem.scene().addItem(p3)
ax3.linkToView(p3)
p3.setXLink(plot.plotItem)
ax3.setZValue(-10000)
if args.get('label',False):
ax3.setLabel(args.get('label',False), color=args.get('color','#ffffff'))
plot.viewBoxes.append(p3)
p3.setGeometry(plot.plotItem.vb.sceneBoundingRect())
p3.linkedViewChanged(plot.plotItem.vb, p3.XAxis)
## Handle view resizing
Callback = functools.partial(self.updateViews,plot)
plot.getViewBox().sigStateChanged.connect(Callback)
self.axisItems.append(ax3)
return p3
def enableRightAxis(self,plot):
p = self.pg.ViewBox()
plot.showAxis('right')
plot.setMenuEnabled(False)
plot.scene().addItem(p)
plot.getAxis('right').linkToView(p)
p.setXLink(plot)
plot.viewBoxes.append(p)
Callback = functools.partial(self.updateViews,plot)
plot.getViewBox().sigStateChanged.connect(Callback)
return p
def updateViews(self,plot):
for a in plot.viewBoxes:
a.setGeometry(plot.getViewBox().sceneBoundingRect())
a.linkedViewChanged(plot.plotItem.vb, a.XAxis)
def configureWidgetBay(self,name='controls'):
if(self.widgetBay):return
dock = QtGui.QDockWidget()
dock.setFeatures(QtGui.QDockWidget.DockWidgetMovable|QtGui.QDockWidget.DockWidgetFloatable)#|QDockWidget.DockWidgetVerticalTitleBar)
dock.setWindowTitle(name)
fr = QtGui.QFrame()
fr.setStyleSheet("QLineEdit {color: rgb(0,0,0);}QPushButton, QLabel ,QComboBox{color: rgb(255, 255, 255);}")
dock.setWidget(fr)
self.addDockWidget(QtCore.Qt.LeftDockWidgetArea, dock)
self.frame_area = QtGui.QVBoxLayout(fr)
self.frame_area.setMargin(0)
self.widgetBay = True
def updateWidgetBay(self,obj):
self.configureWidgetBay()
self.frame_area.addWidget(obj)
def addHandler(self,handler,name = 'Controls'):
'''
Add handler instance(subclass of QFrame) to the left side of the window.
The contents of the handler are QWidgets which control various aspects
of the experiment that the handler has been designed for.
'''
self.configureWidgetBay(name)
self.frame=handler
self.updateWidgetBay(self.frame)
#self.updateWidgetBay(self.frame)
try:
self.I = handler.I
if(self.console_enabled):
self.ipyConsole.pushVariables({"I":self.I})
self.ipyConsole.printText("Access hardware using the Instance 'I'. e.g. I.get_average_voltage(0)")
except:
print 'Device Not Connected.'
def addConsole(self,**args):
try:
#read arguments
self.I = args.get('I',self.I)
self.showSplash();self.updateSplash(10,'Importing iPython Widgets...')
from iPythonEmbed import QIPythonWidget;self.updateSplash(10,'Creating Dock Widget...')
#-------create an area for it to sit------
dock = QtGui.QDockWidget()
dock.setFeatures(QtGui.QDockWidget.DockWidgetMovable|QtGui.QDockWidget.DockWidgetFloatable)#|QDockWidget.DockWidgetVerticalTitleBar)
dock.setWindowTitle("Interactive Python Console")
fr = QtGui.QFrame();self.updateSplash(10)
dock.setWidget(fr)
self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, dock)
fr.setFrameShape(QtGui.QFrame.StyledPanel)
fr.setFrameShadow(QtGui.QFrame.Raised);self.updateSplash(10,'Embedding IPython Widget...')
#--------instantiate the iPython class-------
self.ipyConsole = QIPythonWidget(customBanner="An interactive Python Console!\n");self.updateSplash(10)
layout = QtGui.QVBoxLayout(fr)
layout.setMargin(0)
layout.addWidget(self.ipyConsole);self.updateSplash(10,'Preparing default command dictionary...')
cmdDict = {"delayedTask":self.delayedTask,"loopTask":self.loopTask,"addWidget":self.addWidget,"setCommand":self.setCommand,"Widgets":Widgets}
#if self.graphContainer1_enabled:cmdDict["graph"]=self.graph
if self.I :
cmdDict["I"]=self.I
self.ipyConsole.printText("Access hardware using the Instance 'I'. e.g. I.get_average_voltage('CH1')")
self.ipyConsole.pushVariables(cmdDict);self.updateSplash(10,'Winding up...')
self.console_enabled=True
self.splash.finish(dock);self.updateSplash(10)
dock.widget().setMaximumSize(QtCore.QSize(self.width(), self.height()/3))
dock.widget().setMinimumSize(QtCore.QSize(self.width(), self.height()/3))
print dock.width(),dock.height()
def dockResize():
dock.widget().setMaximumSize(65535,65535)
dock.widget().setMinimumSize(60,60)
self.delayedTask(0,dockResize)
return self.ipyConsole
except:
self.splash.finish(self);self.updateSplash(10)
errbox = QtGui.QMessageBox()
errbox.setStyleSheet('background:#fff;')
print errbox.styleSheet()
errbox.about(self, "Error", "iPython-qtconsole not found.\n Please Install the module")
def showHelp(self):
from PyQt4 import QtWebKit
dock = QtGui.QMainWindow()
self.helpView = QtWebKit.QWebView()
dock.setCentralWidget(self.helpView)
dock.setWindowTitle("Help window")
dock.show()
self.helpView.setUrl(QtCore.QUrl(self.help_url))
self.helpWindow = dock
def showFullHelp(self):
from PyQt4 import QtWebKit
dock = QtGui.QMainWindow()
self.helpView = QtWebKit.QWebView()
dock.setCentralWidget(self.helpView)
dock.setWindowTitle("Help window")
dock.show()
URL = pkg_resources.resource_filename(__name__, os.path.join('helpfiles','interface.html'))
self.helpView.setUrl(QtCore.QUrl(URL))
self.fullHelpWindow = dock
def showImageMap(self):
from PyQt4 import QtWebKit
dock = QtGui.QMainWindow()
self.helpView = QtWebKit.QWebView()
dock.setCentralWidget(self.helpView)
dock.setWindowTitle("Help window")
dock.show()
URL = pkg_resources.resource_filename(__name__, os.path.join('helpfiles','imagemap.html'))
self.helpView.setUrl(QtCore.QUrl(URL))
self.imageMapHelp = dock
def setHelpUrl(self,url):
if 'http' in url:
self.help_url = url
else:
self.help_url = pkg_resources.resource_filename(__name__, os.path.join('helpfiles',url))
def new3dSurface(self,plot,**args):
import scipy.ndimage as ndi
surface3d = self.gl.GLSurfacePlotItem(z=np.array([[0.1,0.1],[0.1,0.1]]), **args)
#surface3d.shader()['colorMap']=self.pg.ColorMap(np.array([0.2,0.4,0.6]),np.array([[255,0,0,255],[0,255,0,255],[0,255,255,255]])).getLookupTable()
#surface3d.shader()['colorMap'] = np.array([0.2, 2, 0.5, 0.2, 1, 1, 0.2, 0, 2])
plot.addItem(surface3d)
return surface3d
def setSurfaceData(self,surf,z):
surf.setData(z=np.array(z))
def draw3dLine(self,plot,x,y,z,color=(100,100,100)):
pts = np.vstack([x,y,z]).transpose()
plt = self.gl.GLLinePlotItem(pos=pts, color=self.pg.glColor(color),width=2)
plot.addItem(plt)
plot.plotLines3D.append(plt)
return plt
def clearLinesOnPlane(self,plot):
for a in plot.plotLines3D:
plot.removeItem(a)# a.setData(pos=[[0,0,0]])
plot.plotLines3D=[]
class relay_to_console():
def __init__(self,console):
self.console = console
self.cursor = self.console.textCursor()
self.scroll=self.console.verticalScrollBar()
def write(self,arg):
f=open('b.txt','at')
self.cursor.movePosition(QtGui.QTextCursor.End)
self.console.setTextCursor(self.cursor)
self.console.insertPlainText(arg)
#self.scroll.setValue(self.scroll.maximum())
f.write(arg)
def flush(self):
pass
    def graph(self,x,y):
        # Push (x, y) data to the pre-allocated reserved curve, but only
        # when the first graph container has been enabled by the host GUI.
        if(self.graphContainer1_enabled): self.reserved_curve.setData(x,y)
    def setRange(self,plot,x,y,width,height):
        # Set the visible region of *plot* to the given rectangle; x and y
        # are the top-left corner in plot coordinates.
        plot.setRange(QtCore.QRectF(x,y,width,height))
def addCurve(self,plot,name='',col=(255,255,255),axis='left'):
#if(len(name)):curve = plot.plot(name=name)
#else:curve = plot.plot()
if(len(name)):curve = self.pg.PlotCurveItem(name=name)
else:curve = self.pg.PlotCurveItem()
plot.addItem(curve)
curve.setPen(color=col, width=1)
return curve
    def rebuildLegend(self,plot):
        # Recreate the plot legend near the top-right corner and keep a
        # handle to it; named curves added afterwards register themselves.
        self.plotLegend = plot.addLegend(offset=(-10,30))
def loopTask(self,interval,func,*args):
timer = QtCore.QTimer()
timerCallback = functools.partial(func,*args)
timer.timeout.connect(timerCallback)
timer.start(interval)
self.timers.append(timer)
return timer
    def delayedTask(self,interval,func,*args):
        # Schedule a one-shot call of func(*args) after *interval* ms.
        timer = QtCore.QTimer()
        timerCallback = functools.partial(func,*args)
        # NOTE(review): QTimer.singleShot is a static method, so the
        # 'timer' instance created above is not the timer that actually
        # fires; it is only kept alive in self.timers.  Confirm whether
        # the extra instance is intentional.
        timer.singleShot(interval,timerCallback)
        self.timers.append(timer)
def addButton(self,name,command,*args):
b=QtGui.QPushButton(None)
b.setText(name)
self.updateWidgetBay(b)
self.setCommand(b,"clicked()",command,*args)
return b
def addWidget(self,widget_type,**args):
b=widget_type(**args)
if(args.has_key('object_name')): b.setObjectName(args.get('object_name'))
if(args.has_key('text')): b.setText(args.get('text'))
if(args.has_key('items')):
for a in args.get('items'): b.addItem(a)
self.updateWidgetBay(b)
return b
    def setCommand(self,widget,signal,slot,*args):
        # Bind *slot* (pre-loaded with *args*) to the given Qt signal using
        # the old-style string connect API, e.g. signal="clicked()".
        # QObject and SIGNAL are expected to be imported at module scope.
        buttonCallback = functools.partial(slot,*args)
        QObject.connect(widget, SIGNAL(signal), buttonCallback)
'''
class WorkThread(QtCore.QThread):
punched = QtCore.pyqtSignal()
def __init__(self):
QtCore.QThread.__init__(self)
def __del__(self):
self.wait()
def run(self):
for i in range(11):
time.sleep(0.5)
self.punched.emit()
self.terminate()
progress = QtGui.QProgressDialog("Copying...", "Cancel", 0, 10)
progress.show()
T = self.WorkThread()
T.punched.connect(lambda: progress.setValue(progress.value()+1))
T.start()
'''
| gpl-3.0 | -2,923,714,953,830,845,400 | 31.355517 | 149 | 0.704736 | false | 2.878623 | false | false | false |
lhl/vrdev | 013-glfw-pyopengl/run.py | 1 | 1480 | #!/usr/bin/env python
import envoy
import subprocess
import sys
import time
import logging
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler, LoggingEventHandler
# Script to watch: taken from the command line, falling back to the demo.
try:
    WATCH = sys.argv[1]
except IndexError:
    # The original bare 'except:' swallowed every error, not just a
    # missing argument; only a missing argv entry should trigger the
    # fallback.
    WATCH = '001-initial.py'

# Timestamp of the last restart, used to debounce filesystem events.
LASTRUN = 0
class watcher(FileSystemEventHandler):
    """Restarts the watched script whenever its source file changes."""

    def on_modified(self, event):
        global WATCH, LASTRUN
        what = 'directory' if event.is_directory else 'file'
        # Only react to file events matching the watched prefix, and
        # debounce bursts of events to at most one restart per 2 seconds.
        if what != 'file':
            return
        if not event.src_path.startswith('./%s' % WATCH[:3]):
            return
        if not (time.time() - LASTRUN > 2.0):
            return
        LASTRUN = time.time()
        logging.info("Modified %s: %s", what, event.src_path)
        # Remember which window had focus before restarting the script.
        query = envoy.run('xdotool getactivewindow')
        window_id = query.std_out.strip()
        envoy.run('pkill -x -f "python %s"' % WATCH)
        subprocess.Popen(['python %s' % WATCH], shell=True, stdin=None,
                         stdout=None, stderr=None, close_fds=True)
        # Give the new process a moment, then restore the focused window.
        time.sleep(1.0)
        envoy.run('xdotool windowactivate %s' % window_id)
if __name__ == "__main__":
    # Timestamped log line for every restart.
    logging.basicConfig(level=logging.INFO,
                        format='%(asctime)s - %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S')
    watch_root = '.'
    file_observer = Observer()
    file_observer.schedule(watcher(), watch_root, recursive=True)
    file_observer.start()
    # Idle until Ctrl-C, then shut the observer thread down cleanly.
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        file_observer.stop()
    file_observer.join()
| apache-2.0 | -4,814,636,934,122,627,000 | 26.924528 | 118 | 0.641216 | false | 3.288889 | false | false | false |
drvinceknight/Axelrod | axelrod/strategies/grumpy.py | 1 | 1668 | from axelrod import Player
class Grumpy(Player):
    """A player that defects after a certain level of grumpiness.

    Grumpiness increases when the opponent defects and decreases when the
    opponent cooperates.
    """

    name = 'Grumpy'

    def __init__(self, starting_state='Nice', grumpy_threshold=10, nice_threshold=-10):
        """Player starts off nice by default, with the given thresholds."""
        super(Grumpy, self).__init__()
        self.history = []
        self.score = 0
        self.state = starting_state
        self.starting_state = starting_state
        self.grumpy_threshold = grumpy_threshold
        self.nice_threshold = nice_threshold

    def strategy(self, opponent):
        """Cooperate while 'Nice', defect while 'Grumpy'.

        The player turns 'Grumpy' once grumpiness exceeds grumpy_threshold
        and only becomes 'Nice' again after grumpiness drops below the
        much lower nice_threshold (hysteresis between the two states).
        """
        # Equivalent to the original pair of generator sums, but a single
        # C-level count per symbol instead of a Python-level loop each.
        history = opponent.history
        self.grumpiness = history.count('D') - history.count('C')

        if self.state == 'Nice':
            if self.grumpiness > self.grumpy_threshold:
                self.state = 'Grumpy'
                return 'D'
            return 'C'
        if self.state == 'Grumpy':
            if self.grumpiness < self.nice_threshold:
                self.state = 'Nice'
                return 'C'
            return 'D'

    def reset(self):
        """Reset history and state for the next round of the tournament.

        NOTE(review): self.score is deliberately left untouched here,
        matching the original behaviour; confirm whether it should also
        be reset.
        """
        self.history = []
        self.state = self.starting_state
| mit | 951,406,063,209,069,400 | 37.790698 | 160 | 0.619305 | false | 3.971429 | false | false | false |
def init_actions_(service, args):
    """Return the dependency map between this service's actions.

    The returned mapping means: running 'test' requires 'install' to have
    run first.  See ACTION_DEPS in this module for the expected shape.
    """
    # Default wiring for simple actions.
    dependencies = {
        'test': ['install'],
    }
    return dependencies
def test(job):
    """
    Test the created directory structure is corrected after ays blueprint on a test repo
    """
    import sys

    RESULT_OK = 'OK : %s'
    RESULT_FAILED = 'FAILED : %s'
    RESULT_ERROR = 'ERROR : %s %%s' % job.service.name

    model = job.service.model
    model.data.result = RESULT_OK % job.service.name
    failures = []
    # blueprint name -> whether executing it is expected to succeed
    blueprints = {
        'bp_args_with_dot.yaml': True,
        'bp_args_with_underscore.yaml': True,
        'bp_valid_args.yaml': True,
        'bp_non_exists_args.yaml': False,
    }
    repo = None
    # BUG FIX: 'cl' is referenced in the 'finally' block; if client
    # creation failed the cleanup raised NameError and masked the real
    # error.  Initialise it up front and guard the cleanup.
    cl = None
    try:
        repo = 'sample_repo4'
        cl = j.clients.atyourservice.get().api.ays
        for bp_name, should_success in blueprints.items():
            try:
                bp_resp = cl.executeBlueprint(data=None, repository=repo, blueprint=bp_name)
                if not should_success and bp_resp.status_code == 200:
                    failures.append("blueprint %s should have failed" % bp_name)
            except Exception as e:
                if should_success:
                    failures.append("blueprint %s should have succeded : %s" % (bp_name, str(e)))
        if failures:
            model.data.result = RESULT_FAILED % '\n'.join(failures)
    except Exception:
        # Record the unexpected error on the service instead of raising
        # (narrowed from a bare 'except:' that also caught SystemExit).
        model.data.result = RESULT_ERROR % str(sys.exc_info()[:2])
    finally:
        job.service.save()
        if repo and cl is not None:
            cl.destroyRepository(data=None, repository=repo)
| apache-2.0 | -3,026,263,630,030,186,000 | 28.948276 | 97 | 0.586644 | false | 3.86 | false | false | false |
elkingtowa/pyrake | tests/test_spidermanager/__init__.py | 1 | 2733 | import sys
import os
import shutil
from zope.interface.verify import verifyObject
from twisted.trial import unittest
# ugly hack to avoid cyclic imports of pyrake.spider when running this test
# alone
from pyrake.interfaces import ISpiderManager
from pyrake.spidermanager import SpiderManager
from pyrake.http import Request
module_dir = os.path.dirname(os.path.abspath(__file__))
class SpiderManagerTest(unittest.TestCase):
    """Exercises SpiderManager: discovery, instantiation, request routing.

    setUp copies the fixture spiders into a fresh temp dir importable as
    the package 'test_spiders_xxx', so the originals are never touched.
    """
    def setUp(self):
        orig_spiders_dir = os.path.join(module_dir, 'test_spiders')
        self.tmpdir = self.mktemp()
        os.mkdir(self.tmpdir)
        self.spiders_dir = os.path.join(self.tmpdir, 'test_spiders_xxx')
        shutil.copytree(orig_spiders_dir, self.spiders_dir)
        # Make the copied package importable, then point the manager at it.
        sys.path.append(self.tmpdir)
        self.spiderman = SpiderManager(['test_spiders_xxx'])
    def tearDown(self):
        # Undo the import-path and module-cache mutations done in setUp.
        del self.spiderman
        del sys.modules['test_spiders_xxx']
        sys.path.remove(self.tmpdir)
    def test_interface(self):
        # The manager must implement the ISpiderManager contract.
        verifyObject(ISpiderManager, self.spiderman)
    def test_list(self):
        self.assertEqual(set(self.spiderman.list()),
            set(['spider1', 'spider2', 'spider3', 'spider4']))
    def test_create(self):
        spider1 = self.spiderman.create("spider1")
        self.assertEqual(spider1.__class__.__name__, 'Spider1')
        # Extra keyword arguments are forwarded to the spider constructor.
        spider2 = self.spiderman.create("spider2", foo="bar")
        self.assertEqual(spider2.__class__.__name__, 'Spider2')
        self.assertEqual(spider2.foo, 'bar')
    def test_find_by_request(self):
        # Each fixture spider declares which domains it handles; a URL may
        # match several spiders, one spider, or none at all.
        self.assertEqual(self.spiderman.find_by_request(Request('http://pyrake1.org/test')),
            ['spider1'])
        self.assertEqual(self.spiderman.find_by_request(Request('http://pyrake2.org/test')),
            ['spider2'])
        self.assertEqual(set(self.spiderman.find_by_request(Request('http://pyrake3.org/test'))),
            set(['spider1', 'spider2']))
        self.assertEqual(self.spiderman.find_by_request(Request('http://pyrake999.org/test')),
            [])
        self.assertEqual(self.spiderman.find_by_request(Request('http://spider3.com')),
            [])
        self.assertEqual(self.spiderman.find_by_request(Request('http://spider3.com/onlythis')),
            ['spider3'])
    def test_load_spider_module(self):
        self.spiderman = SpiderManager(['tests.test_spidermanager.test_spiders.spider1'])
        assert len(self.spiderman._spiders) == 1
    def test_load_base_spider(self):
        # A module containing only the base Spider class yields no spiders.
        self.spiderman = SpiderManager(['tests.test_spidermanager.test_spiders.spider0'])
        assert len(self.spiderman._spiders) == 0
    def test_load_from_crawler(self):
        spider = self.spiderman.create('spider4', a='OK')
        self.assertEqual(spider.a, 'OK')
| mit | -162,390,153,043,694,340 | 37.492958 | 97 | 0.66008 | false | 3.544747 | true | false | false |
barseghyanartur/django-haystack | haystack/management/commands/clear_index.py | 12 | 2182 | # encoding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
from django.core.management.base import BaseCommand
from django.utils import six
from haystack import connections
class Command(BaseCommand):
    help = "Clears out the search index completely."

    def add_arguments(self, parser):
        # --noinput skips the interactive confirmation prompt.
        parser.add_argument(
            '--noinput', action='store_false', dest='interactive', default=True,
            help='If provided, no prompts will be issued to the user and the data will be wiped out.'
        )
        # -u/--using may be repeated to target specific backends.
        parser.add_argument(
            "-u", "--using", action="append", default=[],
            help='Update only the named backend (can be used multiple times). '
                 'By default all backends will be updated.'
        )
        parser.add_argument(
            '--nocommit', action='store_false', dest='commit',
            default=True, help='Will pass commit=False to the backend.'
        )

    def handle(self, **options):
        """Clears out the search index completely."""
        self.verbosity = int(options.get('verbosity', 1))
        self.commit = options.get('commit', True)
        # An empty --using list means "every configured backend".
        using = options.get('using') or connections.connections_info.keys()

        if options.get('interactive', True):
            self.stdout.write("WARNING: This will irreparably remove EVERYTHING from your search index in connection '%s'." % "', '".join(using))
            self.stdout.write("Your choices after this are to restore from backups or rebuild via the `rebuild_index` command.")
            answer = six.moves.input("Are you sure you wish to continue? [y/N] ")
            if not answer.lower().startswith('y'):
                self.stdout.write("No action taken.")
                return

        if self.verbosity >= 1:
            self.stdout.write("Removing all documents from your index because you said so.")

        for connection_name in using:
            connections[connection_name].get_backend().clear(commit=self.commit)

        if self.verbosity >= 1:
            self.stdout.write("All documents removed.")
| bsd-3-clause | -8,526,028,623,413,117,000 | 37.964286 | 145 | 0.617782 | false | 4.337972 | false | false | false |
koriaf/djfp2 | src/djfp2/settings/base.py | 1 | 2704 | import os
from envparse import env
# Django settings module; every deployment-specific value comes from the
# environment via envparse, with development-friendly defaults.
BASE_DIR = os.path.dirname(os.path.dirname(__file__))

DEBUG = env.bool('PL_DEBUG', default=True)
SECRET_KEY = env('PL_SECRET_KEY', default="DefaultSecretKey")

ALLOWED_HOSTS = ['*']  # change to actual before production
CSRF_COOKIE_SECURE = not DEBUG  # change it to False if you are not using HTTPS

# Application definition
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'djfp2',
    'djfp2.calendar',
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'debug': DEBUG,
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

ROOT_URLCONF = 'djfp2.urls'
WSGI_APPLICATION = 'djfp2.wsgi.application'
AUTH_PASSWORD_VALIDATORS = []

LANGUAGE_CODE = 'en-us'
USE_I18N = False
USE_L10N = False
USE_TZ = True
# FIX: TIME_ZONE used to be assigned twice ('UTC' first, then the env
# value); only the effective env-driven assignment is kept.
TIME_ZONE = env('PL_TIMEZONE', default='UTC')

STATIC_URL = '/static/'
MEDIA_URL = '/static/media/'
# for collectstatic
STATIC_ROOT = env(
    'PL_STATIC_ROOT',
    default=os.path.join(BASE_DIR, "../../var/static_root")
)

DATABASES = {
    'default': {
        'ENGINE': env('PL_DB_ENGINE', default='django.db.backends.postgresql_psycopg2'),
        'NAME': env('PL_DB_NAME', default='django_planner'),
        'HOST': env('PL_DB_HOST', default='db'),
        'PORT': env('PL_DB_PORT', default=5432),
        'USER': env('PL_DB_USERNAME', default='django_planner'),
        'PASSWORD': env('PL_DB_PASSWORD', default='replace it in django.env file'),
        'ATOMIC_REQUESTS': True,
    }
}

RAVEN_DSN = env('PL_RAVEN_DSN', default=None)
if RAVEN_DSN:
    # BUG FIX: INSTALLED_APPS is a tuple, so the original '+= [list]'
    # raised TypeError whenever PL_RAVEN_DSN was set; concatenate a tuple.
    INSTALLED_APPS += (
        'raven.contrib.django.raven_compat',
    )
    RAVEN_CONFIG = {
        'dsn': RAVEN_DSN,
    }

EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
| apache-2.0 | 4,497,029,765,264,549,000 | 27.166667 | 88 | 0.64645 | false | 3.444586 | false | false | false |
lucalianas/ProMort | promort/rois_manager/migrations/0015_focusregion_tissue_status.py | 2 | 1660 | # -*- coding: utf-8 -*-
# Copyright (c) 2019, CRS4
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Generated by Django 1.11.5 on 2018-10-09 12:40
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds the optional 'tissue_status'
    # choice field (NORMAL / STRESSED / TUMOR) to the FocusRegion model.
    # The field definition must stay exactly in sync with the model state,
    # so the code below is left untouched.

    dependencies = [
        ('rois_manager', '0014_auto_20171109_1015'),
    ]

    operations = [
        migrations.AddField(
            model_name='focusregion',
            name='tissue_status',
            field=models.CharField(blank=True, choices=[(b'NORMAL', b'Normal'), (b'STRESSED', b'Stressed'), (b'TUMOR', b'Tumor')], max_length=8),
        ),
    ]
| mit | -2,482,427,241,874,219,000 | 40.5 | 145 | 0.713253 | false | 3.980815 | false | false | false |